def test_post_job_collection(self):
    """Can add a treeherder collections to a TreeherderRequest."""
    collection = TreeherderJobCollection()
    for job_datum in self.job_data:
        collection.add(collection.get_job(job_datum))

    client = TreeherderClient(
        server_url='http://host',
        client_id='client-abc',
        secret='secret123',
    )

    def verify_post(request):
        # The POSTed body must match the collection's serialized data.
        self.assertEqual(json.loads(request.body),
                         collection.get_collection_data())
        return (200, {}, '{"message": "Job successfully updated"}')

    endpoint = client._get_endpoint_url(collection.endpoint_base,
                                        project='project')
    responses.add_callback(responses.POST, endpoint,
                           match_querystring=True,
                           callback=verify_post,
                           content_type='application/json')

    client.post_collection('project', collection)
def test_send_artifact_collection(self):
    """Can add a artifact collections to a TreeherderRequest."""
    collection = TreeherderArtifactCollection()
    for artifact_datum in self.artifact_data:
        collection.add(collection.get_artifact(artifact_datum))

    client = TreeherderClient(
        server_url='http://host',
        client_id='client-abc',
        secret='secret123',
    )

    def verify_post(request):
        # The POSTed body must match the collection's serialized data.
        self.assertEqual(json.loads(request.body),
                         collection.get_collection_data())
        return (200, {}, '{"message": "Artifacts stored successfully"}')

    endpoint = client._get_endpoint_url(collection.endpoint_base,
                                        project='project')
    responses.add_callback(responses.POST, endpoint,
                           match_querystring=True,
                           callback=verify_post,
                           content_type='application/json')

    client.post_collection('project', collection)
def test_send_result_collection(self):
    """Can add a treeherder collections to a TreeherderRequest."""
    collection = TreeherderResultSetCollection()
    for resultset_datum in self.resultset_data:
        collection.add(collection.get_resultset(resultset_datum))

    client = TreeherderClient(
        server_url='http://host',
        client_id='client-abc',
        secret='secret123',
    )

    def verify_post(request):
        # The POSTed body must match the collection's serialized data.
        self.assertEqual(json.loads(request.body),
                         collection.get_collection_data())
        return (
            200,
            {},
            '{"message": "well-formed JSON stored", "resultsets": [123, 456]}'
        )

    endpoint = client._get_endpoint_url(collection.endpoint_base,
                                        project='project')
    responses.add_callback(responses.POST, endpoint,
                           match_querystring=True,
                           callback=verify_post,
                           content_type='application/json')

    client.post_collection('project', collection)
def ingest_git_pushes(project, dry_run=False):
    """
    Ingest Github commits for *project* as Treeherder pushes.

    Fetches all commits for the repo from Github, decides which commits are
    the tip revision of a push/merge (Treeherder groups commits by push),
    ingests those tips, and finally verifies that Treeherder's push API
    returns the pushes in the same order as Github.
    """
    if not GITHUB_TOKEN:
        raise Exception(
            "Set GITHUB_TOKEN env variable to avoid rate limiting - Visit https://github.com/settings/tokens."
        )

    logger.info("--> Converting Github commits to pushes")
    repo_info = repo_meta(project)
    owner, repo = repo_info["owner"], repo_info["repo"]
    github_commits = github.commits_info(owner, repo)

    non_push_revisions = []
    push_revisions = []
    push_to_date = {}
    for commit in github_commits:
        info = github.commit_info(owner, repo, commit["sha"])
        # Revisions that are marked as non-push should be ignored
        if commit["sha"] in non_push_revisions:
            logger.debug("Not a revision of a push: {}".format(commit["sha"]))
            continue

        # Establish which revisions to ignore: every parent after the first
        # belongs to the merged branch, not to the mainline push history.
        for index, parent in enumerate(info["parents"]):
            if index != 0:
                non_push_revisions.append(parent["sha"])

        # The 1st parent is the push from `master` from which we forked
        oldest_parent_revision = info["parents"][0]["sha"]
        push_to_date[oldest_parent_revision] = info["commit"]["committer"]["date"]
        logger.info("Push: {} - Date: {}".format(
            oldest_parent_revision, push_to_date[oldest_parent_revision]))
        push_revisions.append(commit["sha"])

    if not dry_run:
        logger.info("--> Ingest Github pushes")
        for revision in push_revisions:
            ingest_git_push(project, revision)

    # Test that the *order* of the pushes is correct
    logger.info(
        "--> Validating that the ingested pushes are in the right order")
    client = TreeherderClient(server_url="http://localhost:8000")
    th_pushes = client.get_pushes(project, count=len(push_revisions))
    assert len(push_revisions) == len(th_pushes)
    for index, revision in enumerate(push_revisions):
        if revision != th_pushes[index]["revision"]:
            logger.warning("{} does not match {}".format(
                revision, th_pushes[index]["revision"]))
def test_get_pushes(self):
    """get_pushes() returns the `results` list from the push endpoint."""
    client = TreeherderClient()
    endpoint = client._get_endpoint_url(client.PUSH_ENDPOINT,
                                        project='mozilla-inbound')
    payload = {
        "meta": {"count": 3, "repository": "mozilla-inbound", "offset": 0},
        "results": self.PUSHES,
    }
    responses.add(responses.GET, endpoint, json=payload,
                  match_querystring=True, status=200)

    pushes = client.get_pushes("mozilla-inbound")
    self.assertEqual(len(pushes), 3)
    self.assertEqual(pushes, self.PUSHES)
def test_get_results(self):
    """The deprecated get_resultsets() alias still returns push results."""
    client = TreeherderClient()
    endpoint = client._get_endpoint_url(client.PUSH_ENDPOINT,
                                        project='mozilla-inbound')
    payload = {
        "meta": {"count": 3, "repository": "mozilla-inbound", "offset": 0},
        "results": self.PUSHES,
    }
    responses.add(responses.GET, endpoint, json=payload,
                  match_querystring=True, status=200)

    pushes = client.get_resultsets("mozilla-inbound")
    self.assertEqual(len(pushes), 3)
    self.assertEqual(pushes, self.PUSHES)
def test_hawkauth_setup(self):
    """Test that HawkAuth is correctly set up from the `client_id` and `secret` params."""
    client = TreeherderClient(
        client_id='client-abc',
        secret='secret123',
    )
    session_auth = client.session.auth
    assert isinstance(session_auth, HawkAuth)
    self.assertEqual(
        session_auth.credentials,
        {
            'id': 'client-abc',
            'key': 'secret123',
            'algorithm': 'sha256',
        },
    )
def post_collection(project, th_collection):
    """POST *th_collection* to *project* via a test-server-bound client."""
    # http://testserver is the Django test client's default host name.
    return TreeherderClient(
        server_url='http://testserver').post_collection(project, th_collection)
def post_collection(project, th_collection):
    """POST *th_collection* to *project* against a localhost server."""
    return TreeherderClient(
        server_url='http://localhost').post_collection(project, th_collection)
def handle(self, *args, **options):
    """Mirror reference data from a remote Treeherder instance into the
    local database.

    Fetches option collections, machine platforms, machines, job groups,
    job types, products, failure classifications, build platforms, and
    repositories (with their groups) from the server given by
    options['server'], creating any rows that do not already exist.
    """
    c = TreeherderClient(server_url=options['server'])

    # options / option collection hashes
    # FIX: dict.iteritems() is Python 2 only and was removed in Python 3;
    # use items() (consistent with the other copy of this command).
    for (uuid, props) in c.get_option_collection_hash().items():
        for prop in props:
            option, _ = Option.objects.get_or_create(name=prop['name'])
            OptionCollection.objects.get_or_create(
                option_collection_hash=uuid,
                option=option)

    # machine platforms
    for machine_platform in c.get_machine_platforms():
        MachinePlatform.objects.get_or_create(
            os_name=machine_platform['os_name'],
            platform=machine_platform['platform'],
            architecture=machine_platform['architecture'])

    # machine
    for machine in c.get_machines():
        Machine.objects.get_or_create(
            id=machine['id'],
            name=machine['name'],
            defaults={
                'first_timestamp': machine['first_timestamp'],
                'last_timestamp': machine['last_timestamp']
            })

    # job group
    for job_group in c.get_job_groups():
        JobGroup.objects.get_or_create(
            id=job_group['id'],
            symbol=job_group['symbol'],
            name=job_group['name'],
            defaults={
                'description': job_group['description']
            })

    # job type
    for job_type in c.get_job_types():
        JobType.objects.get_or_create(
            id=job_type['id'],
            symbol=job_type['symbol'],
            name=job_type['name'],
            defaults={
                'description': job_type['description']
            })

    # product
    for product in c.get_products():
        Product.objects.get_or_create(
            id=product['id'],
            name=product['name'],
            defaults={
                'description': product['description']
            })

    # failure classification
    for failure_classification in c.get_failure_classifications():
        FailureClassification.objects.get_or_create(
            id=failure_classification['id'],
            name=failure_classification['name'],
            defaults={
                'description': failure_classification['description']
            })

    # build platform
    for build_platform in c.get_build_platforms():
        BuildPlatform.objects.get_or_create(
            id=build_platform['id'],
            os_name=build_platform['os_name'],
            defaults={
                'platform': build_platform['platform'],
                'architecture': build_platform['architecture']
            })

    # repository and repository group
    for repository in c.get_repositories():
        rgroup, _ = RepositoryGroup.objects.get_or_create(
            name=repository['repository_group']['name'],
            description=repository['repository_group']['description']
        )
        Repository.objects.get_or_create(
            id=repository['id'],
            repository_group=rgroup,
            name=repository['name'],
            dvcs_type=repository['dvcs_type'],
            url=repository['url'],
            defaults={
                'codebase': repository['codebase'],
                'description': repository['description'],
                'active_status': repository['active_status']
            })
def handle(self, *args, **options):
    """Sync reference data (platforms, machines, job metadata, products,
    classifications, and repositories) from the Treeherder server named in
    options['server'] into the local database, creating missing rows."""
    client = TreeherderClient(server_url=options['server'])

    # options / option collection hashes
    for (collection_hash, properties) in client.get_option_collection_hash().items():
        for prop in properties:
            option, _ = Option.objects.get_or_create(name=prop['name'])
            OptionCollection.objects.get_or_create(
                option_collection_hash=collection_hash, option=option)

    # machine platforms
    for platform_data in client.get_machine_platforms():
        MachinePlatform.objects.get_or_create(
            os_name=platform_data['os_name'],
            platform=platform_data['platform'],
            architecture=platform_data['architecture'])

    # machine
    for machine_data in client.get_machines():
        Machine.objects.get_or_create(
            id=machine_data['id'],
            name=machine_data['name'],
            defaults={'first_timestamp': machine_data['first_timestamp'],
                      'last_timestamp': machine_data['last_timestamp']})

    # job group
    for group_data in client.get_job_groups():
        JobGroup.objects.get_or_create(
            id=group_data['id'],
            symbol=group_data['symbol'],
            name=group_data['name'],
            defaults={'description': group_data['description']})

    # job type
    for type_data in client.get_job_types():
        JobType.objects.get_or_create(
            id=type_data['id'],
            symbol=type_data['symbol'],
            name=type_data['name'],
            defaults={'description': type_data['description']})

    # product
    for product_data in client.get_products():
        Product.objects.get_or_create(
            id=product_data['id'],
            name=product_data['name'],
            defaults={'description': product_data['description']})

    # failure classification
    for classification_data in client.get_failure_classifications():
        FailureClassification.objects.get_or_create(
            id=classification_data['id'],
            name=classification_data['name'],
            defaults={'description': classification_data['description']})

    # build platform
    for build_platform_data in client.get_build_platforms():
        BuildPlatform.objects.get_or_create(
            id=build_platform_data['id'],
            os_name=build_platform_data['os_name'],
            defaults={'platform': build_platform_data['platform'],
                      'architecture': build_platform_data['architecture']})

    # repository and repository group
    for repo_data in client.get_repositories():
        group, _ = RepositoryGroup.objects.get_or_create(
            name=repo_data['repository_group']['name'],
            description=repo_data['repository_group']['description'])
        Repository.objects.get_or_create(
            id=repo_data['id'],
            repository_group=group,
            name=repo_data['name'],
            dvcs_type=repo_data['dvcs_type'],
            url=repo_data['url'],
            defaults={'codebase': repo_data['codebase'],
                      'description': repo_data['description'],
                      'active_status': repo_data['active_status']})