def test_job_transformation(pulse_jobs, transformed_pulse_jobs, result_set_stored):
    """Validated pulse jobs, once transformed, must match the expected fixtures.

    Fix: removed leftover debugging code (``pprint`` of every transformed job,
    a commented-out assert, and a second redundant ``jl.transform`` call per
    iteration) and hoisted the ``json`` import to the top of the function.
    """
    import json

    # Build a lookup from the stored push's short revision to a fake hash,
    # as expected by JobLoader.transform's second argument.
    revision = result_set_stored[0]["revisions"][0]["revision"][:12]
    rs_lookup = {revision: {"revision_hash": "123"}}

    jl = JobLoader()
    validated_jobs = jl._get_validated_jobs_by_project(pulse_jobs)
    for idx, job in enumerate(validated_jobs["test_treeherder_jobs"]):
        # Round-trip through JSON so non-primitive values (e.g. datetimes)
        # compare as plain JSON-compatible types against the fixture.
        assert transformed_pulse_jobs[idx] == json.loads(
            json.dumps(jl.transform(job, rs_lookup)))
def test_job_transformation(pulse_jobs, transformed_pulse_jobs):
    """Each validated pulse job, after transform, must equal its fixture entry."""
    import json

    loader = JobLoader()
    by_project = loader._get_validated_jobs_by_project(pulse_jobs)
    for index, pulse_job in enumerate(by_project["test_treeherder_jobs"]):
        # JSON round-trip normalizes the transformed payload before comparing.
        round_tripped = json.loads(json.dumps(loader.transform(pulse_job)))
        assert transformed_pulse_jobs[index] == round_tripped
def test_job_transformation(pulse_jobs, transformed_pulse_jobs):
    """Every raw pulse job must validate and transform into its fixture entry."""
    import json

    loader = JobLoader()
    for index, raw_job in enumerate(pulse_jobs):
        # The job must pass schema validation before transformation.
        assert loader._is_valid_job(raw_job)
        # JSON round-trip normalizes the transformed payload before comparing.
        round_tripped = json.loads(json.dumps(loader.transform(raw_job)))
        assert transformed_pulse_jobs[index] == round_tripped
def handle(self, *args, **options):
    # Consume a bounded number of Pulse messages, then regenerate the JSON
    # test fixtures (raw messages, tasks, and transformed jobs) on disk.
    # NOTE(review): indentation reconstructed from a collapsed source line;
    # nesting of the write-out section inside the except block looks intended
    # (the consumer presumably raises once maxMessages is reached) — confirm.
    UpdateJobFixtures.maxMessages = 100
    self.stdout.write("The Pulse consumer will consume {number} messages".format(number=UpdateJobFixtures.maxMessages))
    with pulse_conn as connection:
        # Bind the fixture-updating consumer to every job source, listening
        # on all routing keys ("#.<key>").
        consumer = prepare_consumer(
            connection,
            UpdateJobFixtures,
            job_sources,
            lambda key: "#.{}".format(key),
        )
        try:
            consumer.run()
        except Exception:
            # Reached after the consumer stops (see NOTE above); process the
            # messages it accumulated and rewrite the fixture files.
            tc_messages = {}
            tc_tasks = {}
            th_jobs = {}
            jl = JobLoader()
            for message in consumer.messages:
                taskId = message["payload"]["status"]["taskId"]
                task = fetchTask(taskId)
                runs = handleMessage(message, task)
                for run in runs:
                    try:
                        # Only keep entries whose run transforms cleanly.
                        th_jobs[taskId] = jl.transform(run)
                        tc_messages[taskId] = message
                        tc_tasks[taskId] = task
                    except Exception:
                        logger.info('Issue validating this message: %s', run)
            logger.info("Updating Taskcluster jobs: %s entries", len(tc_messages))
            with open(os.path.join(tests_path, 'taskcluster_pulse_messages.json'), 'w') as fh:
                # Write new line at the end to satisfy prettier
                fh.write(json.dumps(tc_messages, sort_keys=True, indent=2) + "\n")
            logger.info("Updating Taskcluster task: %s entries", len(tc_tasks))
            with open(os.path.join(tests_path, 'taskcluster_tasks.json'), 'w') as fh:
                # Write new line at the end to satisfy prettier
                fh.write(json.dumps(tc_tasks, sort_keys=True, indent=2) + "\n")
            logger.info("Updating transformed messages: %s entries", len(th_jobs))
            with open(os.path.join(tests_path, 'taskcluster_transformed_jobs.json'), 'w') as fh:
                # Write new line at the end to satisfy prettier
                fh.write(json.dumps(th_jobs, sort_keys=True, indent=2) + "\n")
    self.stdout.write("Pulse Job listening stopped...")
def test_job_transformation(pulse_jobs, transformed_pulse_jobs):
    """Transformed validated jobs must round-trip to the expected fixtures."""
    import json

    loader = JobLoader()
    validated = loader._get_validated_jobs_by_project(pulse_jobs)
    project_jobs = validated["test_treeherder_jobs"]
    for position, entry in enumerate(project_jobs):
        # Serialize then parse so the comparison uses plain JSON types.
        transformed = loader.transform(entry)
        assert transformed_pulse_jobs[position] == json.loads(json.dumps(transformed))
def test_job_transformation(pulse_jobs, transformed_pulse_jobs):
    """Raw pulse jobs must validate, then transform into the expected fixtures."""
    import json

    loader = JobLoader()
    for position, entry in enumerate(pulse_jobs):
        # Schema validation must succeed before attempting the transform.
        assert loader._is_valid_job(entry)
        # Compare via a JSON round-trip to normalize the transformed payload.
        transformed = loader.transform(entry)
        assert transformed_pulse_jobs[position] == json.loads(json.dumps(transformed))