def test_resultset_create(sample_resultset, jm, initial_data):
    """
    test posting data to the resultset endpoint via webtest.
    expected result are:

    - return code 200
    - return message successful
    - 1 resultset stored in the jobs schema
    """
    # Build a result-set collection from the sample fixture data.
    trsc = TreeherderResultSetCollection()
    for rs in sample_resultset:
        rs = trsc.get_resultset(rs)
        trsc.add(rs)

    # POST the collection to the project's resultset endpoint.
    resp = test_utils.post_collection(jm.project, trsc)

    assert resp.status_int == 200
    assert resp.json['message'] == 'well-formed JSON stored'

    # Verify exactly one result set was persisted with the expected hash.
    stored_objs = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.resultset_by_rev_hash",
        placeholders=[sample_resultset[0]['revision_hash']]
    )

    assert len(stored_objs) == 1
    assert stored_objs[0]['revision_hash'] == sample_resultset[0]['revision_hash']

    jm.disconnect()
def create_resultset_collection(dataset):
    """Build a TreeherderResultSetCollection from a list of push dicts."""
    print("[DEBUG] ResultSet Collection:")
    print(dataset)

    collection = TreeherderResultSetCollection()

    for push in dataset:
        resultset = collection.get_resultset()
        resultset.add_push_timestamp(push["push_timestamp"])
        resultset.add_revision(push["revision"])
        resultset.add_author(push["author"])
        # TODO: figure out where type is used
        # resultset.add_type(push['type'])

        revision_entries = []
        for rev in push["revisions"]:
            entry = resultset.get_revision()
            entry.add_revision(rev["revision"])
            entry.add_author(rev["author"])
            entry.add_comment(rev["comment"])
            entry.add_repository(rev["repository"])
            revision_entries.append(entry)

        resultset.add_revisions(revision_entries)
        collection.add(resultset)

    return collection
def create_resultset_collection(dataset):
    """Translate raw push dicts into a TreeherderResultSetCollection."""
    print("[DEBUG] ResultSet Collection:")
    print(dataset)
    result = TreeherderResultSetCollection()
    for item in dataset:
        rs = result.get_resultset()
        rs.add_push_timestamp(item['push_timestamp'])
        rs.add_revision(item['revision'])
        rs.add_author(item['author'])
        # rs.add_type(item['type'])
        revs = []
        for rev_data in item['revisions']:
            rev = rs.get_revision()
            rev.add_revision(rev_data['revision'])
            rev.add_author(rev_data['author'])
            rev.add_comment(rev_data['comment'])
            rev.add_repository(rev_data['repository'])
            revs.append(rev)
        rs.add_revisions(revs)
        result.add(rs)
    return result
def test_resultset_create(sample_resultset, jm, initial_data):
    """
    test posting data to the resultset endpoint via webtest.
    expected result are:

    - return code 200
    - return message successful
    - 1 resultset stored in the jobs schema
    """
    # Wrap each sample result set in a collection for posting.
    trsc = TreeherderResultSetCollection()
    for rs in sample_resultset:
        rs = trsc.get_resultset(rs)
        trsc.add(rs)

    resp = test_utils.post_collection(jm.project, trsc)

    assert resp.status_int == 200
    assert resp.json['message'] == 'well-formed JSON stored'

    # Confirm the result set landed in the jobs schema.
    stored_objs = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.resultset_by_rev_hash",
        placeholders=[sample_resultset[0]['revision_hash']])

    assert len(stored_objs) == 1
    assert stored_objs[0]['revision_hash'] == sample_resultset[0][
        'revision_hash']

    jm.disconnect()
def create_resultset_collection(dataset):
    """Build a result-set collection from the given push dataset."""
    print("[DEBUG] ResultSet Collection:")
    print(dataset)
    out = TreeherderResultSetCollection()
    for push in dataset:
        resultset = out.get_resultset()
        # Drive the simple scalar setters from a table of (method, key).
        for setter, key in ((resultset.add_push_timestamp, 'push_timestamp'),
                            (resultset.add_revision, 'revision'),
                            (resultset.add_author, 'author')):
            setter(push[key])
        # TODO: figure out where type is used
        # resultset.add_type(push['type'])
        entries = []
        for rev in push['revisions']:
            entry = resultset.get_revision()
            for setter, key in ((entry.add_revision, 'revision'),
                                (entry.add_author, 'author'),
                                (entry.add_comment, 'comment'),
                                (entry.add_repository, 'repository')):
                setter(rev[key])
            entries.append(entry)
        resultset.add_revisions(entries)
        out.add(resultset)
    return out
def test_resultset_collection(self):
    """Confirm the collection matches the sample data"""
    trc = TreeherderResultSetCollection()

    for resultset in self.resultset_data:
        trs = TreeherderResultSet(resultset)
        trc.add(trs)

    # assertEqual reports both values on failure, unlike
    # assertTrue(a == b) which only says "False is not true".
    self.assertEqual(len(self.resultset_data), len(trc.data))
def test_resultset_collection(self):
    """Confirm the collection matches the sample data"""
    trc = TreeherderResultSetCollection()

    for resultset in self.resultset_data:
        trs = TreeherderResultSet(resultset)
        trc.add(trs)

    # Use assertEqual so a mismatch shows the two lengths,
    # rather than assertTrue's unhelpful "False is not true".
    self.assertEqual(
        len(self.resultset_data), len(trc.data)
    )
def test_resultset_with_bad_key(sample_resultset, jm, initial_data):
    """A mismatched OAuth consumer key must be rejected with 403."""
    collection = TreeherderResultSetCollection()
    for entry in sample_resultset:
        collection.add(collection.get_resultset(entry))

    response = test_utils.post_collection(jm.project,
                                          collection,
                                          status=403,
                                          consumer_key="horrible key")

    assert response.status_int == 403
    assert response.json["response"] == "access_denied"
    assert response.json["message"] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)
def test_resultset_with_bad_secret(sample_resultset, jm, initial_data):
    """A wrong OAuth consumer secret must yield a 403 invalid_client error."""
    collection = TreeherderResultSetCollection()
    for entry in sample_resultset:
        collection.add(collection.get_resultset(entry))

    response = test_utils.post_collection(jm.project,
                                          collection,
                                          status=403,
                                          consumer_secret="horrible secret")

    assert response.status_int == 403
    assert response.json["message"] == "Client authentication failed for project, {0}".format(jm.project)
    assert response.json["response"] == "invalid_client"
def test_resultset_with_bad_secret(sample_resultset, jm, initial_data):
    """Posting with an invalid consumer secret is rejected as invalid_client."""
    rs_collection = TreeherderResultSetCollection()
    for sample in sample_resultset:
        rs_collection.add(rs_collection.get_resultset(sample))

    resp = test_utils.post_collection(
        jm.project,
        rs_collection,
        status=403,
        consumer_secret="horrible secret",
    )

    assert resp.status_int == 403
    assert resp.json['detail'] == "Client authentication failed for project, {0}".format(jm.project)
    assert resp.json['response'] == "invalid_client"
def test_resultset_with_bad_key(sample_resultset, jm, initial_data):
    """Posting with a mismatched consumer key is rejected as access_denied."""
    rs_collection = TreeherderResultSetCollection()
    for sample in sample_resultset:
        rs_collection.add(rs_collection.get_resultset(sample))

    resp = test_utils.post_collection(
        jm.project,
        rs_collection,
        status=403,
        consumer_key="horrible key",
    )

    assert resp.status_int == 403
    assert resp.json['response'] == "access_denied"
    assert resp.json['detail'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)
def transform(self, pushlog, repository):
    """
    Transform a pushlog dict into Treeherder result-set collections.

    Returns a dict mapping repository name -> TreeherderResultSetCollection.
    Also caches the newest push key seen so later runs can resume from it.
    """
    # NOTE: the original assigned an unused `result_sets = []` here;
    # it was dead code and has been removed.

    # last push available
    if pushlog:
        last_push = max(pushlog.keys())
    else:
        last_push = None

    th_collections = {}

    # iterate over the pushes
    for push in pushlog.values():
        result_set = dict()
        result_set['push_timestamp'] = push['date']
        result_set['revisions'] = []
        # Author of the push/resultset
        result_set['author'] = push['user']

        rev_hash_components = []

        # iterate over the revisions
        for change in push['changesets']:
            revision = dict()
            # we need to get the short version of a revision
            # because buildapi doesn't provide the long one
            # and we need to match it
            revision['revision'] = change['node'][0:12]
            revision['files'] = change['files']
            revision['author'] = change['author']
            revision['branch'] = change['branch']
            revision['comment'] = change['desc']
            revision['repository'] = repository
            rev_hash_components.append(change['node'])
            rev_hash_components.append(change['branch'])

            # append the revision to the push
            result_set['revisions'].append(revision)

        result_set['revision_hash'] = generate_revision_hash(
            rev_hash_components)

        if repository not in th_collections:
            th_collections[repository] = TreeherderResultSetCollection()

        th_resultset = th_collections[repository].get_resultset(result_set)
        th_collections[repository].add(th_resultset)

    # cache the last push seen
    if last_push:
        cache.set("{0}:last_push".format(repository), last_push)

    return th_collections
def transform(self, pushlog, repository):
    """Convert a pushlog dict into per-repository result-set collections."""
    collections = {}

    for push in pushlog.values():
        # Metadata describing the push itself.
        resultset = {
            'push_timestamp': push['date'],
            'revisions': [],
            'author': push['user'],
            'active_status': push.get('active_status', 'active'),
        }

        hash_parts = []

        # we only want to ingest the last 200 revisions.
        for change in push['changesets'][-200:]:
            # buildapi only provides the short (12-char) revision form,
            # so store that to allow matching later.
            resultset['revisions'].append({
                'revision': change['node'][0:12],
                'files': change['files'],
                'author': change['author'],
                'branch': change['branch'],
                'comment': change['desc'],
                'repository': repository,
            })
            hash_parts.append(change['node'])
            hash_parts.append(change['branch'])

        resultset['revision_hash'] = generate_revision_hash(hash_parts)

        if repository not in collections:
            collections[repository] = TreeherderResultSetCollection()
        target = collections[repository]
        target.add(target.get_resultset(resultset))

    return collections
def test_send_result_collection(self, mock_send):
    """Can add a treeherder collections to a TreeherderRequest."""
    collection = TreeherderResultSetCollection()
    for resultset in self.resultset_data:
        collection.add(collection.get_resultset(resultset))

    request = TreeherderRequest(
        protocol='http',
        host='host',
        project='project',
        oauth_key='key',
        oauth_secret='secret',
    )
    request.post(collection)

    self.assertEqual(mock_send.call_count, 1)
    self.assertEqual(collection.to_json(),
                     mock_send.call_args_list[0][1]['data'])
def test_resultset_sample_data(self):
    """Test all add methods for building result sets"""
    collection = TreeherderResultSetCollection()

    for sample in self.resultset_data:
        resultset = TreeherderResultSet()
        resultset.add_push_timestamp(sample['push_timestamp'])
        resultset.add_revision_hash(sample['revision_hash'])
        resultset.add_author(sample['author'])
        resultset.add_type('push')
        resultset.add_artifact('push_data', 'push', {'stuff': [1, 2, 3, 4, 5]})

        for rev in sample['revisions']:
            revision = TreeherderRevision()
            revision.add_revision(rev['revision'])
            revision.add_author(rev['author'])
            revision.add_comment(rev['comment'])
            revision.add_files(rev['files'])
            revision.add_repository(rev['repository'])
            resultset.add_revision(revision)

        self.compare_structs(resultset.data, sample)
        collection.add(resultset)

        # confirm we get the same thing if we initialize from
        # a resultset dict
        from_dict = TreeherderResultSet(sample)
        self.compare_structs(from_dict.data, sample)
def test_resultset_sample_data(self):
    """Test all add methods for building result sets"""
    built = TreeherderResultSetCollection()

    for source in self.resultset_data:
        result_set = TreeherderResultSet()

        result_set.add_push_timestamp(source['push_timestamp'])
        result_set.add_revision_hash(source['revision_hash'])
        result_set.add_author(source['author'])
        result_set.add_type('push')
        result_set.add_artifact('push_data', 'push', {'stuff': [1, 2, 3, 4, 5]})

        for revision_data in source['revisions']:
            revision = TreeherderRevision()
            revision.add_revision(revision_data['revision'])
            revision.add_author(revision_data['author'])
            revision.add_comment(revision_data['comment'])
            revision.add_files(revision_data['files'])
            revision.add_repository(revision_data['repository'])
            result_set.add_revision(revision)

        self.compare_structs(result_set.data, source)
        built.add(result_set)

        # confirm we get the same thing if we initialize from
        # a resultset dict
        round_trip = TreeherderResultSet(source)
        self.compare_structs(round_trip.data, source)
def test_send_result_collection(self, mock_send):
    """Can add a treeherder collections to a TreeherderRequest."""
    rs_collection = TreeherderResultSetCollection()
    for sample in self.resultset_data:
        rs = rs_collection.get_resultset(sample)
        rs_collection.add(rs)

    request = TreeherderRequest(
        protocol='http',
        host='host',
        project='project',
        oauth_key='key',
        oauth_secret='secret',
    )
    request.post(rs_collection)

    # Exactly one HTTP send, carrying the serialized collection.
    self.assertEqual(mock_send.call_count, 1)
    sent_payload = mock_send.call_args_list[0][1]['data']
    self.assertEqual(rs_collection.to_json(), sent_payload)
def main():
    # Build one result set and one job for a WebRTC endurance run and
    # submit both to Treeherder via an OAuth-signed TreeherderRequest.
    # Timestamps come from the command line (Python 2 script: uses
    # `print` statements below).
    submit_time, start_time, end_time = argv[1:4]

    config = get_config()

    app_revision, app_repository = get_app_information(config)
    files = get_files(config)
    build_version = get_build_version(os.path.basename(files[0]))
    # push time approximated from the first file's creation time
    push_time = int(os.stat(files[0]).st_ctime)
    results = steepleparse.parse(config['system']['logfile'])
    result_set_hash = create_revision_hash()

    trsc = TreeherderResultSetCollection()

    # Result set: one synthetic revision describing the nightly build.
    trs = trsc.get_resultset()
    trs.add_revision_hash(result_set_hash)
    trs.add_author('Firefox Nightly')
    trs.add_push_timestamp(push_time)

    tr = trs.get_revision()
    tr.add_revision(app_revision)
    tr.add_author('Firefox Nightly')
    tr.add_comment(build_version)
    tr.add_files([os.path.basename(f) for f in files])
    tr.add_repository(app_repository)

    trs.add_revision(tr)

    trsc.add(trs)

    tjc = TreeherderJobCollection()

    # Job: metadata for the endurance test run, tied to the result set
    # by result_set_hash.
    tj = tjc.get_job()

    tj.add_revision_hash(result_set_hash)
    tj.add_project(config['repo']['project'])
    tj.add_job_guid(str(uuid.uuid4()))

    tj.add_group_name('WebRTC QA Tests')
    tj.add_group_symbol('WebRTC')

    tj.add_job_name('Endurance')
    tj.add_job_symbol('end')

    tj.add_build_info('linux', 'linux64', 'x86_64')
    tj.add_machine_info('linux', 'linux64', 'x86_64')
    tj.add_description('WebRTC Sunny Day')
    tj.add_option_collection({'opt': True})  # must not be {}!
    tj.add_reason('testing')
    tj.add_who('Mozilla Platform QA')

    tj.add_submit_timestamp(submit_time)
    tj.add_start_timestamp(start_time)
    tj.add_end_timestamp(end_time)

    tj.add_state('completed')
    tj.add_machine(socket.gethostname())

    result_string = get_result_string(results)
    tj.add_result(result_string)
    # A busted run produced no usable results, so skip the artifacts.
    if result_string != 'busted':
        summary = get_result_summary(results)
        tj.add_artifact('Job Info', 'json', summary)
        tj.add_artifact('Results', 'json', results)

    tjc.add(tj)

    # Dump both payloads for debugging before sending.
    print 'trsc = ' + json.dumps(json.loads(trsc.to_json()),
                                 sort_keys=True,
                                 indent=4, separators=(',', ': '))

    print 'tjc = ' + json.dumps(json.loads(tjc.to_json()),
                                sort_keys=True,
                                indent=4, separators=(',', ': '))

    req = TreeherderRequest(
        protocol='http',
        host=config['repo']['host'],
        project=config['repo']['project'],
        oauth_key=config['credentials']['key'],
        oauth_secret=config['credentials']['secret']
    )

    req.post(trsc)
    req.post(tjc)
def submit(perf_data, revision):
    # Submit Servo page-load performance data for one git revision to
    # Treeherder: builds a result-set collection and a job collection,
    # then posts both via TreeherderClient.
    print("[DEBUG] performance data:")
    print(perf_data)
    # TODO: read the correct guid from test result
    # NOTE(review): the guid is just a random string the same length as
    # the commit hash — not derived from the test result.
    hashlen = len(revision['commit'])
    job_guid = ''.join(
        random.choice(string.letters + string.digits) for i in xrange(hashlen)
    )

    trsc = TreeherderResultSetCollection()

    author = "{} <{}>".format(revision['author']['name'],
                              revision['author']['email'])

    dataset = [
        {
            # The top-most revision in the list of commits for a push.
            'revision': revision['commit'],
            'author': author,
            'push_timestamp': int(revision['author']['timestamp']),
            'type': 'push',
            # a list of revisions associated with the resultset. There should
            # be at least one.
            'revisions': [
                {
                    'comment': revision['subject'],
                    'revision': revision['commit'],
                    'repository': 'servo',
                    'author': author
                }
            ]
        }
    ]

    for data in dataset:
        trs = trsc.get_resultset()
        trs.add_push_timestamp(data['push_timestamp'])
        trs.add_revision(data['revision'])
        trs.add_author(data['author'])
        # trs.add_type(data['type'])

        revisions = []
        for rev in data['revisions']:
            tr = trs.get_revision()
            tr.add_revision(rev['revision'])
            tr.add_author(rev['author'])
            tr.add_comment(rev['comment'])
            tr.add_repository(rev['repository'])
            revisions.append(tr)

        trs.add_revisions(revisions)
        trsc.add(trs)

    # Second dataset: the job description that the perf data hangs off.
    dataset = [
        {
            'project': 'servo',
            'revision': revision['commit'],
            'job': {
                'job_guid': job_guid,
                'product_name': 'servo',
                'reason': 'scheduler',
                # TODO:What is `who` for?
                'who': 'Servo',
                'desc': 'Servo Page Load Time Tests',
                'name': 'Servo Page Load Time',
                # The symbol representing the job displayed in
                # treeherder.allizom.org
                'job_symbol': 'PL',
                # The symbol representing the job group in
                # treeherder.allizom.org
                'group_symbol': 'SP',
                'group_name': 'Servo Perf',
                # TODO: get the real timing from the test runner
                'submit_timestamp': revision['author']['timestamp'],
                'start_timestamp': revision['author']['timestamp'],
                'end_timestamp': revision['author']['timestamp'],
                'state': 'completed',
                'result': 'success',
                'machine': 'local-machine',
                # TODO: read platform test result
                'build_platform': {
                    'platform': 'linux64',
                    'os_name': 'linux',
                    'architecture': 'x86_64'
                },
                'machine_platform': {
                    'platform': 'linux64',
                    'os_name': 'linux',
                    'architecture': 'x86_64'
                },
                'option_collection': {'opt': True},
                # jobs can belong to different tiers
                # setting the tier here will determine which tier the job
                # belongs to. However, if a job is set as Tier of 1, but
                # belongs to the Tier 2 profile on the server, it will still
                # be saved as Tier 2.
                'tier': 1,
                # the ``name`` of the log can be the default of "buildbot_text"
                # however, you can use a custom name. See below.
                # TODO: point this to the log when we have them uploaded
                'log_references': [
                    {
                        'url': 'TBD',
                        'name': 'test log'
                    }
                ],
                # The artifact can contain any kind of structured data
                # associated with a test.
                'artifacts': [
                    {
                        'type': 'json',
                        'name': 'performance_data',
                        # 'job_guid': job_guid,
                        'blob': perf_data
                        # Example of the expected blob shape:
                        # {
                        #     "performance_data": {
                        #         # that is not `talos`?
                        #         "framework": {"name": "talos"},
                        #         "suites": [{
                        #             "name": "performance.timing.domComplete",
                        #             "value": random.choice(range(15,25)),
                        #             "subtests": [
                        #                 {"name": "responseEnd", "value": 123},
                        #                 {"name": "loadEventEnd", "value": 223}
                        #             ]
                        #         }]
                        #     }
                        # }
                    },
                    {
                        'type': 'json',
                        'name': 'Job Info',
                        # 'job_guid': job_guid,
                        "blob": {
                            "job_details": [
                                {
                                    "url": "https://www.github.com/servo/servo",
                                    "value": "website",
                                    "content_type": "link",
                                    "title": "Source code"
                                }
                            ]
                        }
                    }
                ],
                # List of job guids that were coalesced to this job
                'coalesced': []
            }
        }
    ]

    tjc = TreeherderJobCollection()

    for data in dataset:
        tj = tjc.get_job()

        tj.add_revision(data['revision'])
        tj.add_project(data['project'])
        tj.add_coalesced_guid(data['job']['coalesced'])
        tj.add_job_guid(data['job']['job_guid'])
        tj.add_job_name(data['job']['name'])
        tj.add_job_symbol(data['job']['job_symbol'])
        tj.add_group_name(data['job']['group_name'])
        tj.add_group_symbol(data['job']['group_symbol'])
        tj.add_description(data['job']['desc'])
        tj.add_product_name(data['job']['product_name'])
        tj.add_state(data['job']['state'])
        tj.add_result(data['job']['result'])
        tj.add_reason(data['job']['reason'])
        tj.add_who(data['job']['who'])
        tj.add_tier(data['job']['tier'])
        tj.add_submit_timestamp(data['job']['submit_timestamp'])
        tj.add_start_timestamp(data['job']['start_timestamp'])
        tj.add_end_timestamp(data['job']['end_timestamp'])
        tj.add_machine(data['job']['machine'])

        tj.add_build_info(
            data['job']['build_platform']['os_name'],
            data['job']['build_platform']['platform'],
            data['job']['build_platform']['architecture']
        )

        tj.add_machine_info(
            data['job']['machine_platform']['os_name'],
            data['job']['machine_platform']['platform'],
            data['job']['machine_platform']['architecture']
        )

        tj.add_option_collection(data['job']['option_collection'])

        # for log_reference in data['job']['log_references']:
        #     tj.add_log_reference( 'buildbot_text', log_reference['url'])

        # data['artifact'] is a list of artifacts
        for artifact_data in data['job']['artifacts']:
            tj.add_artifact(
                artifact_data['name'],
                artifact_data['type'],
                artifact_data['blob']
            )
        tjc.add(tj)

    # TODO: extract this read credential code out of this function.
    with open('credential.json', 'rb') as f:
        cred = json.load(f)

    client = TreeherderClient(protocol='https',
                              # host='local.treeherder.mozilla.org',
                              host='treeherder.allizom.org',
                              client_id=cred['client_id'],
                              secret=cred['secret'])

    # data structure validation is automatically performed here, if validation
    # fails a TreeherderClientError is raised
    client.post_collection('servo', trsc)
    client.post_collection('servo', tjc)
def main():
    # Build a synthetic result set and job, print the serialized payloads,
    # and submit both to a Treeherder instance over OAuth (Python 2
    # script: uses `print` statements below).
    result_revision_hash = create_revision_hash()

    trsc = TreeherderResultSetCollection()

    trs = trsc.get_resultset()
    # self.required_properties = {
    #     'revision_hash':{ 'len':50, 'cb':self.validate_existence },
    #     'revisions':{ 'type':list, 'cb':self.validate_existence },
    #     'author':{ 'len':150, 'cb':self.validate_existence }
    #     }
    trs.add_revision_hash(result_revision_hash)
    trs.add_author('WebRTC QA Tests')
    trs.add_push_timestamp(int(time.time()))

    tr = trs.get_revision()
    # self.required_properties = {
    #     'revision':{ 'len':50, 'cb':self.validate_existence },
    #     'repository':{ 'cb':self.validate_existence },
    #     'files':{ 'type':list, 'cb':self.validate_existence },
    #     }
    tr.add_revision(create_revision_hash()[:12])
    tr.add_author('Firefox Nightly')
    tr.add_comment('firefox-33.0a1.en-US')
    tr.add_files(['firefox-33.0a1.en-US.linux-i686.tar.bz2',
                  'firefox-33.0a1.en-US.linux-x86_64.tests.zip'])
    tr.add_repository(
        'ftp://ftp.mozilla.org/pub/firefox/nightly/latest-mozilla-central/')

    trs.add_revision(tr)

    trsc.add(trs)

    tjc = TreeherderJobCollection()

    tj = tjc.get_job()
    # self.required_properties = {
    #     'revision_hash':{ 'len':50, 'cb':self.validate_existence },
    #     'project':{ 'cb':self.validate_existence },
    #     'job':{ 'type':dict, 'cb':self.validate_existence },
    #     'job.job_guid':{ 'len':50, 'cb':self.validate_existence }
    #     }
    tj.add_revision_hash(result_revision_hash)
    tj.add_project('qa-try')
    tj.add_job_guid(str(uuid.uuid4()))

    tj.add_build_info('linux', 'linux64', 'x86_64')
    tj.add_description('WebRTC Sunny Day')
    tj.add_machine_info('linux', 'linux64', 'x86_64')
    # Fake a ~3 hour run that ended 5 seconds ago.
    tj.add_end_timestamp(int(time.time()) - 5)
    tj.add_start_timestamp(int(time.time()) - 3600 * 3 - 5)
    tj.add_submit_timestamp(int(time.time()) - 3600 * 3 - 10)
    tj.add_state('completed')
    tj.add_machine('webrtc-server')
    tj.add_option_collection({'opt': True})  # must not be {}!
    tj.add_reason('testing')
    tj.add_result('success')  # must be success/testfailed/busted
    tj.add_who('*****@*****.**')
    tj.add_group_name('WebRTC QA Tests')
    tj.add_group_symbol('WebRTC')
    tj.add_job_symbol('end')
    tj.add_job_name('Endurance')

    tj.add_artifact('Job Info', 'json', {
        "job_details": [
            {
                'title': 'Iterations:',
                'value': '10782',
                'content_type': 'text'
            },
            {
                'title': 'Errors:',
                'value': '5',
                'content_type': 'text'
            },
            {
                'title': 'Longest Pass Duration:',
                'value': '2:58:36.5',
                'content_type': 'text'
            }
        ],
    })

    tjc.add(tj)

    key, secret = get_oauth_creds()
    project, host = get_repo_details()

    req = TreeherderRequest(
        protocol='http',
        host=host,
        project=project,
        oauth_key=key,
        oauth_secret=secret
    )

    # Dump both payloads for inspection before posting.
    print 'trsc = ' + json.dumps(json.loads(trsc.to_json()),
                                 sort_keys=True,
                                 indent=4, separators=(',', ': '))

    print 'tjc = ' + json.dumps(json.loads(tjc.to_json()),
                                sort_keys=True,
                                indent=4, separators=(',', ': '))

    # print 'req.oauth_key = ' + req.oauth_key
    # print 'req.oauth_secret = ' + req.oauth_secret

    # uri = req.get_uri(trsc)
    # print 'req.get_uri() = ' + uri
    # print 'req.oauth_client.get_signed_uri() = ' +
    # req.oauth_client.get_signed_uri(trsc.to_json(), uri)

    req.post(trsc)
    req.post(tjc)