Example 1
    def submit(self, job):
        """Submit the job to treeherder.

        :param job: Treeherder job instance to use for submission.

        """
        job.add_submit_timestamp(int(time.time()))

        # We can only submit job info once, so it has to be done when the job is completed
        if self._job_details:
            job.add_artifact('Job Info', 'json', {'job_details': self._job_details})

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        logger.info('Sending results to Treeherder: {}'.format(job_collection.to_json()))
        url = urlparse(self.url)
        client = TreeherderClient(protocol=url.scheme, host=url.hostname,
                                  client_id=self.client_id, secret=self.secret)
        client.post_collection(self.repository, job_collection)

        logger.info('Results are available to view at: {}'.format(
                    urljoin(self.url,
                            JOB_FRAGMENT.format(repository=self.repository,
                                                revision=self.revision))))
Example 2
    def submit(self, job):
        """Submit the job to treeherder.

        :param job: Treeherder job instance to use for submission.

        """
        job.add_submit_timestamp(int(time.time()))

        if self._job_details:
            job.add_artifact('Job Info', 'json',
                             {'job_details': copy.deepcopy(self._job_details)})
            self._job_details = []

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        logger.info('Sending results to Treeherder: {}'.format(
            job_collection.to_json()))
        self.client.post_collection(self.repository, job_collection)

        logger.info('Results are available to view at: {}'.format(
            urljoin(
                self.client.server_url,
                JOB_FRAGMENT.format(repository=self.repository,
                                    revision=self.revision))))
Example 3

def test_objectstore_create(job_sample, jm):
    """
    test posting data to the objectstore via webtest.
    expected results are:
    - return code 200
    - return message successful
    - 1 job stored in the objectstore
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(jm.project, tjc)

    assert resp.status_int == 200
    assert resp.json['message'] == 'well-formed JSON stored'

    stored_objs = jm.get_os_dhub().execute(
        proc="objectstore_test.selects.row_by_guid",
        placeholders=[job_sample["job"]["job_guid"]]
    )

    assert len(stored_objs) == 1

    assert stored_objs[0]['job_guid'] == job_sample["job"]["job_guid"]
Example 4

    def submit(self, job, logs=None):
        logs = logs or []

        # We can only submit job info once, so it has to be done when the job is completed
        if self._job_details:
            job.add_artifact('Job Info', 'json',
                             {'job_details': self._job_details})

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        print('Sending results to Treeherder: {}'.format(
            job_collection.to_json()))
        url = urlparse(self.url)

        client = TreeherderClient(protocol=url.scheme,
                                  host=url.hostname,
                                  client_id=self.client_id,
                                  secret=self.secret)
        client.post_collection(self.repository, job_collection)

        print('Results are available to view at: {}'.format(
            urljoin(
                self.url,
                JOB_FRAGMENT.format(repository=self.repository,
                                    revision=self.revision))))
Example 5
    def submit(self, job):
        """Submit the job to treeherder.

        :param job: Treeherder job instance to use for submission.

        """
        job.add_submit_timestamp(int(time.time()))

        # We can only submit job info once, so it has to be done when the job is completed
        if self._job_details:
            job.add_artifact('Job Info', 'json',
                             {'job_details': self._job_details})

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        logger.info('Sending results to Treeherder: {}'.format(
            job_collection.to_json()))
        if self.url == 'mock':
            logger.info('Pretending to submit job')
            return

        url = urlparse(self.url)
        client = TreeherderClient(protocol=url.scheme,
                                  host=url.hostname,
                                  client_id=self.client_id,
                                  secret=self.secret)
        client.post_collection(self.repository, job_collection)

        logger.info('Results are available to view at: {}'.format(
            urljoin(
                self.url,
                JOB_FRAGMENT.format(repository=self.repository,
                                    revision=self.revision))))
Example 6
def test_objectstore_create(job_sample, jm):
    """
    test posting data to the objectstore via webtest.
    expected results are:
    - return code 200
    - return message successful
    - 1 job stored in the objectstore
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(jm.project, tjc)

    assert resp.status_int == 200
    assert resp.json['message'] == 'well-formed JSON stored'

    stored_objs = jm.get_os_dhub().execute(
        proc="objectstore_test.selects.row_by_guid",
        placeholders=[job_sample["job"]["job_guid"]])

    assert len(stored_objs) == 1

    assert stored_objs[0]['job_guid'] == job_sample["job"]["job_guid"]

    jm.disconnect()
Example 7

def post_treeherder_jobs(client, fileNames):
    """
    Processes each file and submits a treeherder job with its data.

    :param client: The TreeherderClient to use.
    :param fileNames: The files to process.
    """
    for name in fileNames:
        with gzip.open(name) as f:
            data = json.load(f)

        test_set = data['Slimtest-TalosTP5-Slow']

        nodes = test_set['nodes']
        repo = test_set.get('repo', 'mozilla-inbound')
        # Attempt to retrieve the revision from the metadata, otherwise parse
        # it from the file name which has the form <revision>.json.gz
        if 'revision' in test_set:
            revision = test_set['revision']
        else:
            revision = os.path.basename(name).split('.')[0]

        tjc = TreeherderJobCollection()
        try:
            tjc.add(create_treeherder_job(repo, revision, client, nodes))
        except KeyError as e:
            print "Failed to generate data for %s: %s" % (revision, e)
            continue

        # NB: In theory we could batch these, but each collection has to be from
        #     the same repo and it's possible we have different repos in our
        #     dataset.
        client.post_collection(repo, tjc)
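A possible invocation of the helper above, assuming the perf data lives in gzipped JSON files named <revision>.json.gz in the current directory (the server URL and credentials are hypothetical):

import glob

from thclient import TreeherderClient

client = TreeherderClient(server_url='https://treeherder.allizom.org',
                          client_id='example-client', secret='example-secret')
post_treeherder_jobs(client, glob.glob('*.json.gz'))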
Example 8

def post_treeherder_jobs(client, revisions, s3=None):
    """
    Processes each revision and submits a treeherder job with its data.

    :param client: The TreeherderClient to use.
    :param revisions: A dictionary of revisions and their associated data.
    :param s3: Optional Amazon S3 bucket to upload logs to.
    """
    successful = []
    for (revision, test_set) in revisions.iteritems():
        nodes = test_set['nodes']
        repo = test_set.get('repo', 'mozilla-inbound')

        tjc = TreeherderJobCollection()
        try:
            tjc.add(create_treeherder_job(repo, revision, client, nodes, s3))
        except KeyError as e:
            print "Failed to generate data for %s: %s, probably still running" % (revision, e)
            continue

        try:
            # NB: In theory we could batch these, but each collection has to be from
            #     the same repo and it's possible we have different repos in our
            #     dataset.
            client.post_collection(repo, tjc)
            #print tjc.to_json()

            successful.append(revision)
            print "Submitted perf data for %s to %s" % (revision, client.server_url)
        except Exception as e:
            print "Failed to submit data for %s: %s" % (revision, e)

    return successful
Example 9
    def serve_forever(self):
        logger = utils.getLogger()
        while not self.shutdown_requested:
            wait_seconds = 1    # avoid busy loop
            job = self.jobs.get_next_treeherder_job()
            if job:
                tjc = TreeherderJobCollection()
                for data in job['job_collection']:
                    tj = TreeherderJob(data)
                    tjc.add(tj)
                if self.post_request(job['machine'], job['project'], tjc,
                                     job['attempts'], job['last_attempt']):
                    self.jobs.treeherder_job_completed(job['id'])
                    wait_seconds = 0
                else:
                    attempts = int(job['attempts'])
                    wait_seconds = min(self.retry_wait * attempts, 3600)
                    logger.debug('AutophoneTreeherder waiting for %d seconds after '
                                 'failed attempt %d',
                                 wait_seconds, attempts)
            if wait_seconds > 0:
                for i in range(wait_seconds):
                    if self.shutdown_requested:
                        break
                    time.sleep(1)
Example 10

    def submit_pending(self, jobs):
        """Submit jobs pending notifications to Treeherder
        :param jobs: Lists of jobs to be reported. (TestJob)
        """
        self.logger.debug(type(self).__name__ +
                          '.submit_pending: jobs =\n%s' % jobs)
        if not self.url or not jobs:
            self.logger.debug(type(self).__name__ +
                              '.submit_pending: no url/job')
            return

        tjc = TreeherderJobCollection()

        for j in jobs:
            project = j.build['repo']
            revision = j.build['revision']
            revision_hash = self.request_revision_hash(project, revision)
            if not revision_hash:
                self.logger.debug(type(self).__name__ +
                                  '.submit_pending: no revision hash')
                return
            j.submit_timestamp = timestamp_now()

            self.logger.info('creating Treeherder job %s for %s %s, '
                             'revision_hash: %s' % (j.job_guid,
                                                    j.name, project,
                                                    revision_hash))

            tj = tjc.get_job()
            tj.add_description(j.description)
            tj.add_reason(j.reason)
            tj.add_tier(self.tier)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_who(j.who)
            tj.add_job_guid(j.job_guid)
            tj.add_job_name(j.job_name)
            tj.add_job_symbol(j.job_symbol)
            tj.add_group_name(j.group_name)
            tj.add_group_symbol(j.group_symbol)
            tj.add_product_name(j.build['product'])
            tj.add_state(JobState.PENDING)
            tj.add_submit_timestamp(j.submit_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_start_timestamp(j.submit_timestamp)
            tj.add_end_timestamp(j.submit_timestamp)
            tj.add_build_url(j.build_url)
            tj.add_build_info(j.build['os_name'],
                              j.build['platform'],
                              j.build['architecture'])
            tj.add_machine(j.machine['host'])
            tj.add_machine_info(j.machine['os_name'],
                                j.machine['platform'],
                                j.machine['architecture'])
            # TODO determine type of build
            tj.add_option_collection({'opt': True})

            tjc.add(tj)

        self.post_request(project, tjc, j.job_guid)
Example 11
    def submit_pending(self, jobs):
        """Submit jobs pending notifications to Treeherder
        :param jobs: Lists of jobs to be reported. (TestJob)
        """
        self.logger.debug(type(self).__name__ +
                          '.submit_pending: jobs =\n%s' % jobs)
        if not self.url or not jobs:
            self.logger.debug(type(self).__name__ +
                              '.submit_pending: no url/job')
            return

        tjc = TreeherderJobCollection()

        for j in jobs:
            project = j.build['repo']
            revision = j.build['revision']
            revision_hash = self.request_revision_hash(project, revision)
            if not revision_hash:
                self.logger.debug(type(self).__name__ +
                                  '.submit_pending: no revision hash')
                return
            j.submit_timestamp = timestamp_now()

            self.logger.info('creating Treeherder job %s for %s %s, '
                             'revision_hash: %s' % (j.job_guid,
                                                    j.name, project,
                                                    revision_hash))

            tj = tjc.get_job()
            tj.add_description(j.description)
            tj.add_reason(j.reason)
            tj.add_tier(self.tier)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_who(j.who)
            tj.add_job_guid(j.job_guid)
            tj.add_job_name(j.job_name)
            tj.add_job_symbol(j.job_symbol)
            tj.add_group_name(j.group_name)
            tj.add_group_symbol(j.group_symbol)
            tj.add_product_name(j.build['product'])
            tj.add_state(JobState.PENDING)
            tj.add_submit_timestamp(j.submit_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_start_timestamp(j.submit_timestamp)
            tj.add_end_timestamp(j.submit_timestamp)
            tj.add_build_info(j.build['os_name'],
                              j.build['platform'],
                              j.build['architecture'])
            tj.add_machine(j.machine['host'])
            tj.add_machine_info(j.machine['os_name'],
                                j.machine['platform'],
                                j.machine['architecture'])
            # TODO determine type of build
            tj.add_option_collection({'opt': True})

            tjc.add(tj)

        self.post_request(project, tjc, j.job_guid)
Example 12
    def submit_running(self, machine, build_url, project, revision_hash, tests=None):
        """Submit tests running notifications to Treeherder

        :param machine: machine id
        :param build_url: url to build being tested.
        :param project: repository of build.
        :param revision_hash: Treeherder revision hash of build.
        :param tests: List of tests to be reported.
        """
        if tests is None:
            tests = []
        logger.debug('AutophoneTreeherder.submit_running: %s' % tests)
        if not self.url or not revision_hash:
            logger.debug('AutophoneTreeherder.submit_running: no url/revision hash')
            return

        tjc = TreeherderJobCollection()

        for t in tests:
            logger.debug('AutophoneTreeherder.submit_running: '
                         'for %s %s' % (t.name, project))

            t.submit_timestamp = timestamp_now()
            t.start_timestamp = timestamp_now()

            tj = tjc.get_job()
            tj.add_tier(self.options.treeherder_tier)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_job_guid(t.job_guid)
            tj.add_job_name(t.job_name)
            tj.add_job_symbol(t.job_symbol)
            tj.add_group_name(t.group_name)
            tj.add_group_symbol(t.group_symbol)
            tj.add_product_name('fennec')
            tj.add_state(TestState.RUNNING)
            tj.add_submit_timestamp(t.submit_timestamp)
            tj.add_start_timestamp(t.start_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_end_timestamp(0)
            #
            tj.add_machine(machine)
            tj.add_build_info('android', t.phone.platform, t.phone.architecture)
            tj.add_machine_info('android', t.phone.platform, t.phone.architecture)
            tj.add_option_collection({'opt': True})

            tj.add_artifact('buildapi', 'json', {
                'buildername': t.get_buildername(project)})
            tj.add_artifact('privatebuild', 'json', {
                'build_url': build_url,
                'config_file': t.config_file,
                'chunk': t.chunk})
            tjc.add(tj)

        logger.debug('AutophoneTreeherder.submit_running: tjc: %s' %
                     tjc.to_json())

        self.queue_request(machine, project, tjc)
Example 13

    def test_job_collection(self):
        """Confirm the collection matches the sample data"""
        tjc = TreeherderJobCollection()

        for job in self.job_data:
            tj = TreeherderJob(job)
            tjc.add(tj)

        self.assertTrue(len(self.job_data) == len(tjc.data))
Example 14

    def test_job_collection_job_type(self):
        """Confirm that the job_type argument changes the endpoint_base property"""

        tjc = TreeherderJobCollection()

        tjc_update = TreeherderJobCollection(job_type='update')

        self.assertEqual(tjc.endpoint_base, 'objectstore')
        self.assertEqual(tjc_update.endpoint_base, 'jobs')
Example 15

    def test_job_collection(self):
        """Confirm the collection matches the sample data"""
        tjc = TreeherderJobCollection()

        for job in self.job_data:
            tj = TreeherderJob(job)
            tjc.add(tj)

        self.assertTrue(len(self.job_data) == len(tjc.data))
Example 16
def create_job_collection(dataset):
    print("[DEBUG] Job Collection:")
    print(dataset)

    tjc = TreeherderJobCollection()

    for data in dataset:

        tj = tjc.get_job()

        tj.add_revision(data['revision'])
        tj.add_project(data['project'])
        tj.add_coalesced_guid(data['job']['coalesced'])
        tj.add_job_guid(data['job']['job_guid'])
        tj.add_job_name(data['job']['name'])
        tj.add_job_symbol(data['job']['job_symbol'])
        tj.add_group_name(data['job']['group_name'])
        tj.add_group_symbol(data['job']['group_symbol'])
        tj.add_description(data['job']['desc'])
        tj.add_product_name(data['job']['product_name'])
        tj.add_state(data['job']['state'])
        tj.add_result(data['job']['result'])
        tj.add_reason(data['job']['reason'])
        tj.add_who(data['job']['who'])
        tj.add_tier(data['job']['tier'])
        tj.add_submit_timestamp(data['job']['submit_timestamp'])
        tj.add_start_timestamp(data['job']['start_timestamp'])
        tj.add_end_timestamp(data['job']['end_timestamp'])
        tj.add_machine(data['job']['machine'])

        tj.add_build_info(
            data['job']['build_platform']['os_name'],
            data['job']['build_platform']['platform'],
            data['job']['build_platform']['architecture']
        )

        tj.add_machine_info(
            data['job']['machine_platform']['os_name'],
            data['job']['machine_platform']['platform'],
            data['job']['machine_platform']['architecture']
        )

        tj.add_option_collection(data['job']['option_collection'])

        # for log_reference in data['job']['log_references']:
        #    tj.add_log_reference( 'buildbot_text', log_reference['url'])

        # data['artifact'] is a list of artifacts
        for artifact_data in data['job']['artifacts']:
            tj.add_artifact(
                artifact_data['name'],
                artifact_data['type'],
                artifact_data['blob']
            )
        tjc.add(tj)

    return tjc
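Assuming dataset is a list of dictionaries shaped like the fields accessed above, the returned collection can then be posted with a client configured as in the earlier examples:

tjc = create_job_collection(dataset)
client.post_collection(dataset[0]['project'], tjc)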
Example 17
    def submit_pending(self, tests=[]):
        self.worker.loggerdeco.debug('AutophoneTreeherder.submit_pending: %s' % tests)
        if not self.url or not self.worker.build.revision_hash:
            self.worker.loggerdeco.debug('AutophoneTreeherder.submit_pending: no url/revision hash')
            return

        tjc = TreeherderJobCollection(job_type='update')

        if not tests:
            tests = self.worker.runnable_tests

        for t in tests:
            t.message = None
            t.submit_timestamp = timestamp_now()
            t.job_guid = generate_guid()
            t.job_details = []

            self.worker.loggerdeco.info('creating Treeherder job %s for %s %s, '
                                        'revision: %s, revision_hash: %s' % (
                                            t.job_guid, t.name, t.build.tree,
                                            t.build.revision, t.build.revision_hash))

            self.worker.loggerdeco.debug('AutophoneTreeherder.submit_pending: '
                                         'test config_file=%s, config sections=%s' % (
                                             t.config_file, t.cfg.sections()))

            tj = tjc.get_job()
            tj.add_revision_hash(self.worker.build.revision_hash)
            tj.add_project(self.worker.build.tree)
            tj.add_job_guid(t.job_guid)
            tj.add_job_name(t.job_name)
            tj.add_job_symbol(t.job_symbol)
            tj.add_group_name(t.group_name)
            tj.add_group_symbol(t.group_symbol)
            tj.add_product_name('fennec')
            tj.add_state(TestState.PENDING)
            tj.add_submit_timestamp(t.submit_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_start_timestamp(t.submit_timestamp)
            tj.add_end_timestamp(t.submit_timestamp)
            #
            tj.add_machine(t.phone.id)
            tj.add_build_url(self.worker.build.url)
            tj.add_build_info('android', t.phone.platform, t.phone.architecture)
            tj.add_machine_info('android', t.phone.platform, t.phone.architecture)
            tj.add_option_collection({'opt': True})

            # Fake the buildername from buildbot...
            tj.add_artifact('buildapi', 'json', {'buildername': t.buildername})

            tjc.add(tj)

        self.worker.loggerdeco.debug('AutophoneTreeherder.submit_pending: tjc: %s' % (
            tjc.to_json()))

        self.post_request(tjc)
Example 18
def running_jobs_stored(jm, running_jobs, result_set_stored):
    """
    stores a list of buildapi running jobs into the objectstore
    """
    running_jobs.update(result_set_stored[0])

    tjc = TreeherderJobCollection(job_type='update')
    tj = tjc.get_job(running_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 19
def running_jobs_stored(
        jm, running_jobs, result_set_stored):
    """
    stores a list of buildapi running jobs into the objectstore
    """
    running_jobs.update(result_set_stored[0])

    tjc = TreeherderJobCollection(job_type='update')
    tj = tjc.get_job(running_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 20
def completed_jobs_stored(
        jm, completed_jobs, result_set_stored, mock_send_request):
    """
    stores a list of buildapi completed jobs into the objectstore
    """
    completed_jobs['revision_hash'] = result_set_stored[0]['revision_hash']

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(completed_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 21
def completed_jobs_stored(jm, completed_jobs, result_set_stored,
                          mock_send_request):
    """
    stores a list of buildapi completed jobs into the objectstore
    """
    completed_jobs['revision_hash'] = result_set_stored[0]['revision_hash']

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(completed_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 22
def create_job_collection(dataset):
    print("[DEBUG] Job Collection:")
    print(dataset)

    tjc = TreeherderJobCollection()

    for data in dataset:
        tj = tjc.get_job()

        tj.add_revision(data['revision'])
        tj.add_project(data['project'])
        tj.add_coalesced_guid(data['job']['coalesced'])
        tj.add_job_guid(data['job']['job_guid'])
        tj.add_job_name(data['job']['name'])
        tj.add_job_symbol(data['job']['job_symbol'])
        tj.add_group_name(data['job']['group_name'])
        tj.add_group_symbol(data['job']['group_symbol'])
        tj.add_description(data['job']['desc'])
        tj.add_product_name(data['job']['product_name'])
        tj.add_state(data['job']['state'])
        tj.add_result(data['job']['result'])
        tj.add_reason(data['job']['reason'])
        tj.add_who(data['job']['who'])
        tj.add_tier(data['job']['tier'])
        tj.add_submit_timestamp(data['job']['submit_timestamp'])
        tj.add_start_timestamp(data['job']['start_timestamp'])
        tj.add_end_timestamp(data['job']['end_timestamp'])
        tj.add_machine(data['job']['machine'])

        tj.add_build_info(
            data['job']['build_platform']['os_name'],
            data['job']['build_platform']['platform'],
            data['job']['build_platform']['architecture']
        )

        tj.add_machine_info(
            data['job']['machine_platform']['os_name'],
            data['job']['machine_platform']['platform'],
            data['job']['machine_platform']['architecture']
        )

        tj.add_option_collection(data['job']['option_collection'])

        for artifact_data in data['job']['artifacts']:
            tj.add_artifact(
                artifact_data['name'],
                artifact_data['type'],
                artifact_data['blob']
            )
        tjc.add(tj)

    return tjc
Example 23
def pending_jobs_stored(jm, pending_jobs, result_set_stored):
    """
    stores a list of buildapi pending jobs into the jobs store
    using BuildApiTreeHerderAdapter
    """

    pending_jobs.update(result_set_stored[0])

    tjc = TreeherderJobCollection(job_type='update')
    tj = tjc.get_job(pending_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 24
    def create_job_collection(self, dataset):
        # reference the docs on tjc: https://github.com/mozilla/treeherder/blob/master/docs/submitting_data.rst
        tjc = TreeherderJobCollection()

        for data in dataset:
            tj = tjc.get_job()

            tj.add_revision(data['revision'])
            tj.add_project(data['project'])
            tj.add_coalesced_guid(data['job']['coalesced'])
            tj.add_job_guid(data['job']['job_guid'])
            tj.add_job_name(data['job']['name'])
            tj.add_job_symbol(data['job']['job_symbol'])
            tj.add_group_name(data['job']['group_name'])
            tj.add_group_symbol(data['job']['group_symbol'])
            tj.add_description(data['job']['desc'])
            tj.add_product_name(data['job']['product_name'])
            tj.add_state(data['job']['state'])
            tj.add_result(data['job']['result'])
            tj.add_reason(data['job']['reason'])
            tj.add_who(data['job']['who'])
            tj.add_tier(data['job']['tier'])
            tj.add_submit_timestamp(data['job']['submit_timestamp'])
            tj.add_start_timestamp(data['job']['start_timestamp'])
            tj.add_end_timestamp(data['job']['end_timestamp'])
            tj.add_machine(data['job']['machine'])

            tj.add_build_info(
                data['job']['build_platform']['os_name'],
                data['job']['build_platform']['platform'],
                data['job']['build_platform']['architecture']
            )

            tj.add_machine_info(
                data['job']['machine_platform']['os_name'],
                data['job']['machine_platform']['platform'],
                data['job']['machine_platform']['architecture']
            )

            tj.add_option_collection(data['job']['option_collection'])

            # data['job']['artifacts'] is a list of artifacts
            for artifact_data in data['job']['artifacts']:
                tj.add_artifact(
                    artifact_data['name'],
                    artifact_data['type'],
                    artifact_data['blob']
                )
            tjc.add(tj)
        return tjc
Example 25
def pending_jobs_stored(
        jm, pending_jobs, result_set_stored):
    """
    stores a list of buildapi pending jobs into the jobs store
    using BuildApiTreeHerderAdapter
    """

    pending_jobs.update(result_set_stored[0])

    tjc = TreeherderJobCollection(job_type='update')
    tj = tjc.get_job(pending_jobs)
    tjc.add(tj)

    test_utils.post_collection(jm.project, tjc)
Example 26

    def test_send_without_oauth(self, mock_HTTPConnection, mock_time,
                                mock_generate_nonce):
        """Can send data to the server."""
        mock_time.return_value = 1342229050
        mock_generate_nonce.return_value = "46810593"

        host = 'host'

        req = TreeherderRequest(
            protocol='http',
            host=host,
            project='project',
            oauth_key=None,
            oauth_secret=None,
            )

        mock_conn = mock_HTTPConnection.return_value
        mock_request = mock_conn.request
        mock_response = mock_conn.getresponse.return_value

        tjc = TreeherderJobCollection()

        for job in self.job_data:

            tjc.add(tjc.get_job(job))
            break

        response = req.post(tjc)

        self.assertEqual(mock_HTTPConnection.call_count, 1)
        self.assertEqual(mock_HTTPConnection.call_args[0][0], host)
        self.assertEqual(mock_response, response)
        self.assertEqual(mock_request.call_count, 1)

        uri = req.get_uri(tjc)

        method, path, data, header = mock_request.call_args[0]
        self.assertEqual(method, "POST")

        deserialized_data = json.loads(data)
        self.assertEqual(
            deserialized_data,
            tjc.get_collection_data()
            )

        self.assertEqual(
            header['Content-Type'],
            'application/json',
            )
Example 27

    def submit_pending(self,
                       machine,
                       build_url,
                       project,
                       revision,
                       build_type,
                       build_abi,
                       build_platform,
                       build_sdk,
                       builder_type,
                       tests=[]):
        """Submit tests pending notifications to Treeherder

        :param machine: machine id
        :param build_url: url to build being tested.
        :param project: repository of build.
        :param revision: Either a URL to the changeset or the revision id.
        :param tests: List of tests to be reported.
        """
        logger = utils.getLogger()
        logger.debug('AutophoneTreeherder.submit_pending: %s', tests)
        if not self.url or not revision:
            logger.debug('AutophoneTreeherder.submit_pending: no url/revision')
            return

        tjc = TreeherderJobCollection()

        for t in tests:
            logger.debug('AutophoneTreeherder.submit_pending: for %s %s',
                         t.name, project)

            t.message = None
            t.submit_timestamp = timestamp_now()
            t.job_details = []

            tj = self._create_job(tjc, machine, build_url, project, revision,
                                  build_type, build_abi, build_platform,
                                  build_sdk, builder_type, t)
            tj.add_state(TestState.PENDING)
            tj.add_submit_timestamp(t.submit_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_start_timestamp(0)
            tj.add_end_timestamp(0)
            tjc.add(tj)

        logger.debug('AutophoneTreeherder.submit_pending: tjc: %s',
                     tjc.to_json())

        self.queue_request(machine, project, tjc)
Example 28
def create_job_collection(dataset):
    print("[DEBUG] Job Collection:")
    print(dataset)

    tjc = TreeherderJobCollection()

    for data in dataset:
        tj = tjc.get_job()

        tj.add_revision(data["revision"])
        tj.add_project(data["project"])
        tj.add_coalesced_guid(data["job"]["coalesced"])
        tj.add_job_guid(data["job"]["job_guid"])
        tj.add_job_name(data["job"]["name"])
        tj.add_job_symbol(data["job"]["job_symbol"])
        tj.add_group_name(data["job"]["group_name"])
        tj.add_group_symbol(data["job"]["group_symbol"])
        tj.add_description(data["job"]["desc"])
        tj.add_product_name(data["job"]["product_name"])
        tj.add_state(data["job"]["state"])
        tj.add_result(data["job"]["result"])
        tj.add_reason(data["job"]["reason"])
        tj.add_who(data["job"]["who"])
        tj.add_tier(data["job"]["tier"])
        tj.add_submit_timestamp(data["job"]["submit_timestamp"])
        tj.add_start_timestamp(data["job"]["start_timestamp"])
        tj.add_end_timestamp(data["job"]["end_timestamp"])
        tj.add_machine(data["job"]["machine"])

        tj.add_build_info(
            data["job"]["build_platform"]["os_name"],
            data["job"]["build_platform"]["platform"],
            data["job"]["build_platform"]["architecture"],
        )

        tj.add_machine_info(
            data["job"]["machine_platform"]["os_name"],
            data["job"]["machine_platform"]["platform"],
            data["job"]["machine_platform"]["architecture"],
        )

        tj.add_option_collection(data["job"]["option_collection"])

        for artifact_data in data["job"]["artifacts"]:
            tj.add_artifact(artifact_data["name"], artifact_data["type"], artifact_data["blob"])
        tjc.add(tj)

    return tjc
Example 29
    def process_data(self, raw_data, message):
        try:
            data = super(
                TreeherderPulseDataAdapter,
                self
            ).process_data(raw_data, message)

            # load transformed data into the restful api
            if data and self.loaddata:

                th_collections = {}

                project = data['project']

                th_collections[project] = TreeherderJobCollection()
                tj = th_collections[project].get_job(data)
                th_collections[project].add(tj)

                try:
                    self.load(th_collections)
                # in case of a missing repository, log the error
                # but don't fail
                except Exception as e:
                    self.logger.error(e)
            return data
        except PulseMissingAttributesError as e:
            self.logger.error(e)
Example 30
    def serve_forever(self):
        while not self.shutdown_requested:
            wait = True
            job = self.jobs.get_next_treeherder_job()
            if job:
                tjc = TreeherderJobCollection()
                for data in job['job_collection']:
                    tj = TreeherderJob(data)
                    tjc.add(tj)
                if self.post_request(job['machine'], job['project'], tjc,
                                     job['attempts'], job['last_attempt']):
                    self.jobs.treeherder_job_completed(job['id'])
                    wait = False
            if wait:
                for i in range(self.retry_wait):
                    if self.shutdown_requested:
                        break
                    time.sleep(1)
Example 31
    def submit_running(self, tests=[]):
        self.worker.loggerdeco.debug('AutophoneTreeherder.submit_running: %s' % tests)
        if not self.url or not self.worker.build.revision_hash:
            self.worker.loggerdeco.debug('AutophoneTreeherder.submit_running: no url/revision hash')
            return

        tjc = TreeherderJobCollection(job_type='update')

        if not tests:
            tests = self.worker.runnable_tests

        for t in tests:
            self.worker.loggerdeco.debug('AutophoneTreeherder.submit_running: '
                                         'for %s %s' % (t.name, t.build.tree))

            t.start_timestamp = timestamp_now()

            tj = tjc.get_job()
            tj.add_revision_hash(self.worker.build.revision_hash)
            tj.add_project(self.worker.build.tree)
            tj.add_job_guid(t.job_guid)
            tj.add_job_name(t.job_name)
            tj.add_job_symbol(t.job_symbol)
            tj.add_group_name(t.group_name)
            tj.add_group_symbol(t.group_symbol)
            tj.add_product_name('fennec')
            tj.add_state(TestState.RUNNING)
            tj.add_submit_timestamp(t.submit_timestamp)
            tj.add_start_timestamp(t.start_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_end_timestamp(t.start_timestamp)
            #
            tj.add_machine(t.phone.id)
            tj.add_build_url(self.worker.build.url)
            tj.add_build_info('android', t.phone.platform, t.phone.architecture)
            tj.add_machine_info('android', t.phone.platform, t.phone.architecture)
            tj.add_option_collection({'opt': True})

            tj.add_artifact('buildapi', 'json', {'buildername': t.buildername})
            tjc.add(tj)

        self.worker.loggerdeco.debug('AutophoneTreeherder.submit_running: tjc: %s' %
                                     tjc.to_json())

        self.post_request(tjc)
Example 32

    def create_job_collection(self, dataset):
        # reference the docs on tjc: https://github.com/mozilla/treeherder/blob/master/docs/submitting_data.rst
        tjc = TreeherderJobCollection()

        for data in dataset:
            tj = tjc.get_job()

            tj.add_revision(data['revision'])
            tj.add_project(data['project'])
            tj.add_coalesced_guid(data['job']['coalesced'])
            tj.add_job_guid(data['job']['job_guid'])
            tj.add_job_name(data['job']['name'])
            tj.add_job_symbol(data['job']['job_symbol'])
            tj.add_group_name(data['job']['group_name'])
            tj.add_group_symbol(data['job']['group_symbol'])
            tj.add_description(data['job']['desc'])
            tj.add_product_name(data['job']['product_name'])
            tj.add_state(data['job']['state'])
            tj.add_result(data['job']['result'])
            tj.add_reason(data['job']['reason'])
            tj.add_who(data['job']['who'])
            tj.add_tier(data['job']['tier'])
            tj.add_submit_timestamp(data['job']['submit_timestamp'])
            tj.add_start_timestamp(data['job']['start_timestamp'])
            tj.add_end_timestamp(data['job']['end_timestamp'])
            tj.add_machine(data['job']['machine'])

            tj.add_build_info(data['job']['build_platform']['os_name'],
                              data['job']['build_platform']['platform'],
                              data['job']['build_platform']['architecture'])

            tj.add_machine_info(
                data['job']['machine_platform']['os_name'],
                data['job']['machine_platform']['platform'],
                data['job']['machine_platform']['architecture'])

            tj.add_option_collection(data['job']['option_collection'])

            # data['job']['artifacts'] is a list of artifacts
            for artifact_data in data['job']['artifacts']:
                tj.add_artifact(artifact_data['name'], artifact_data['type'],
                                artifact_data['blob'])
            tjc.add(tj)
        return tjc
Example 33

def test_objectstore_with_bad_key(job_sample, jm):
    """
    test calling with the wrong project key.
    expected results are:
    - return code 403
    - return message failed
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(
        jm.project, tjc, status=403, consumer_key='wrong key'
        )

    assert resp.status_int == 403
    assert resp.json['response'] == "access_denied"
    assert resp.json['message'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)
Example 34

def test_objectstore_with_bad_secret(job_sample, jm):
    """
    test calling with the wrong project secret.
    expected results are:
    - return code 403
    - return message authentication failed
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(
        jm.project, tjc, status=403, consumer_secret='not so secret'
        )

    assert resp.status_int == 403
    assert resp.json['message'] == "Client authentication failed for project, {0}".format(jm.project)
    assert resp.json['response'] == "invalid_client"
Example 35
def test_objectstore_with_bad_key(job_sample, jm):
    """
    test calling with the wrong project key.
    expected results are:
    - return code 403
    - return message failed
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(
        jm.project, tjc, status=403, consumer_key='wrong-key'
        )

    assert resp.status_int == 403
    assert resp.json['response'] == "access_denied"
    assert resp.json['detail'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)
Example 36
def test_objectstore_with_bad_secret(job_sample, jm):
    """
    test calling with the wrong project secret.
    expected results are:
    - return code 403
    - return message authentication failed
    """

    tjc = TreeherderJobCollection()
    tj = tjc.get_job(job_sample)
    tjc.add(tj)

    resp = test_utils.post_collection(
        jm.project, tjc, status=403, consumer_secret='not-so-secret'
        )

    assert resp.status_int == 403
    assert resp.json['detail'] == "Client authentication failed for project, {0}".format(jm.project)
    assert resp.json['response'] == "invalid_client"
Example 37
    def submit_results(self, job):
        job.add_project(self.project)
        job.add_revision_hash(self.retrieve_revision_hash())
        job.add_submit_timestamp(int(time.time()))

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        # self.logger.info
        print('Sending results to Treeherder: %s' % job_collection.to_json())

        url = urlparse(self.url)
        client = TreeherderClient(protocol=url.scheme, host=url.hostname,
                                  client_id=self.client_id, secret=self.secret)
        client.post_collection(self.project, job_collection)

        # self.logger.info
        print('Results are available to view at: %s' % (
            urljoin(self.url,
                    REVISON_FRAGMENT % (self.project, self.revision))))
Example 38

    def submit(self, job, logs=None):
        logs = logs or []

        # We can only submit job info once, so it has to be done when the job is completed
        if self._job_details:
            job.add_artifact('Job Info', 'json', {'job_details': self._job_details})

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        print('Sending results to Treeherder: {}'.format(job_collection.to_json()))
        url = urlparse(self.url)

        client = TreeherderClient(protocol=url.scheme, host=url.hostname,
                                  client_id=self.client_id, secret=self.secret)
        client.post_collection(self.repository, job_collection)

        print('Results are available to view at: {}'.format(
            urljoin(self.url,
                    JOB_FRAGMENT.format(repository=self.repository, revision=self.revision))))
Example 39
    def submit_running(self, machine, build_url, project, revision, build_type,
                       build_abi, build_platform, build_sdk, builder_type, tests=[]):
        """Submit tests running notifications to Treeherder

        :param machine: machine id
        :param build_url: url to build being tested.
        :param project: repository of build.
        :param revision: Either a URL to the changeset or the revision id.
        :param tests: List of tests to be reported.
        """
        logger = utils.getLogger()
        logger.debug('AutophoneTreeherder.submit_running: %s', tests)
        if not self.url or not revision:
            logger.debug('AutophoneTreeherder.submit_running: no url/revision')
            return

        tjc = TreeherderJobCollection()

        for t in tests:
            logger.debug('AutophoneTreeherder.submit_running: for %s %s', t.name, project)

            t.submit_timestamp = timestamp_now()
            t.start_timestamp = timestamp_now()

            tj = self._create_job(tjc, machine, build_url, project, revision,
                                  build_type, build_abi, build_platform,
                                  build_sdk, builder_type, t)
            tj.add_state(TestState.RUNNING)
            tj.add_submit_timestamp(t.submit_timestamp)
            tj.add_start_timestamp(t.start_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_end_timestamp(0)
            tjc.add(tj)

        logger.debug('AutophoneTreeherder.submit_running: tjc: %s',
                     tjc.to_json())

        self.queue_request(machine, project, tjc)
Example 40
    def submit(self, job):
        """Submit the job to treeherder.

        :param job: Treeherder job instance to use for submission.

        """
        job.add_submit_timestamp(int(time.time()))

        if self._job_details:
            job.add_artifact('Job Info', 'json',
                             {'job_details': copy.deepcopy(self._job_details)})
            self._job_details = []

        job_collection = TreeherderJobCollection()
        job_collection.add(job)

        logger.info('Sending results to Treeherder: {}'.format(job_collection.to_json()))
        self.client.post_collection(self.repository, job_collection)

        logger.info('Results are available to view at: {}'.format(
                    urljoin('{0}://{1}'.format(self.client.protocol, self.client.host),
                            JOB_FRAGMENT.format(repository=self.repository,
                                                revision=self.revision))))
Example 41

    def test_send_job_collection(self, mock_send):
        """Can add a treeherder collections to a TreeherderRequest."""

        tjc = TreeherderJobCollection()

        for job in self.job_data:

            tjc.add(tjc.get_job(job))

        req = TreeherderRequest(
            protocol='http',
            host='host',
            project='project',
            oauth_key='key',
            oauth_secret='secret',
            )

        req.post(tjc)

        self.assertEqual(mock_send.call_count, 1)
        self.assertEqual(
            tjc.to_json(),
            mock_send.call_args_list[0][1]['data']
            )
Example 42
    def serve_forever(self):
        while not self.shutdown_requested:
            wait_seconds = 1  # avoid busy loop
            job = self.jobs.get_next_treeherder_job()
            if job:
                tjc = TreeherderJobCollection()
                for data in job['job_collection']:
                    tj = TreeherderJob(data)
                    tjc.add(tj)
                if self.post_request(job['machine'], job['project'], tjc,
                                     job['attempts'], job['last_attempt']):
                    self.jobs.treeherder_job_completed(job['id'])
                    wait_seconds = 0
                else:
                    attempts = int(job['attempts'])
                    wait_seconds = min(self.retry_wait * attempts, 3600)
                    LOGGER.debug(
                        'AutophoneTreeherder waiting for %d seconds after '
                        'failed attempt %d', wait_seconds, attempts)
            if wait_seconds > 0:
                for i in range(wait_seconds):
                    if self.shutdown_requested:
                        break
                    time.sleep(1)
Example 43

    def test_send_without_oauth(self, mock_HTTPConnection, mock_time,
                                mock_generate_nonce):
        """Can send data to the server."""
        mock_time.return_value = 1342229050
        mock_generate_nonce.return_value = "46810593"

        host = 'host'

        req = TreeherderRequest(
            protocol='http',
            host=host,
            project='project',
            oauth_key=None,
            oauth_secret=None,
        )

        mock_conn = mock_HTTPConnection.return_value
        mock_request = mock_conn.request
        mock_response = mock_conn.getresponse.return_value

        tjc = TreeherderJobCollection()

        for job in self.job_data:

            tjc.add(tjc.get_job(job))
            break

        response = req.post(tjc)

        self.assertEqual(mock_HTTPConnection.call_count, 1)
        self.assertEqual(mock_HTTPConnection.call_args[0][0], host)
        self.assertEqual(mock_response, response)
        self.assertEqual(mock_request.call_count, 1)

        uri = req.get_uri(tjc)

        method, path, data, header = mock_request.call_args[0]
        self.assertEqual(method, "POST")

        deserialized_data = json.loads(data)
        self.assertEqual(deserialized_data, tjc.get_collection_data())

        self.assertEqual(
            header['Content-Type'],
            'application/json',
        )
Example 44

    def test_send_job_collection(self, mock_send):
        """Can add a treeherder collections to a TreeherderRequest."""

        tjc = TreeherderJobCollection()

        for job in self.job_data:

            tjc.add(tjc.get_job(job))

        req = TreeherderRequest(
            protocol='http',
            host='host',
            project='project',
            oauth_key='key',
            oauth_secret='secret',
        )

        req.post(tjc)

        self.assertEqual(mock_send.call_count, 1)
        self.assertEqual(tjc.to_json(), mock_send.call_args_list[0][1]['data'])
Example 45

    def submit_complete(self,
                        machine,
                        build_url,
                        project,
                        revision,
                        build_type,
                        build_abi,
                        build_platform,
                        build_sdk,
                        builder_type,
                        tests=None):
        """Submit test results for the worker's current job to Treeherder.

        :param machine: machine id
        :param build_url: url to build being tested.
        :param project: repository of build.
        :param revision: Either a URL to the changeset or the revision id.
        :param tests: List of tests to be reported.
        """
        logger = utils.getLogger()
        logger.debug('AutophoneTreeherder.submit_complete: %s', tests)

        if not self.url or not revision:
            logger.debug(
                'AutophoneTreeherder.submit_complete: no url/revision')
            return

        if tests is None:
            tests = []

        tjc = TreeherderJobCollection()

        for t in tests:
            logger.debug('AutophoneTreeherder.submit_complete for %s %s',
                         t.name, project)

            t.end_timestamp = timestamp_now()
            # A usercancelled job may not have a start_timestamp
            # since it may have been cancelled before it started.
            if not t.start_timestamp:
                t.start_timestamp = t.end_timestamp

            tj = self._create_job(tjc, machine, build_url, project, revision,
                                  build_type, build_abi, build_platform,
                                  build_sdk, builder_type, t)
            tj.add_state(TestState.COMPLETED)
            tj.add_result(t.status)
            tj.add_submit_timestamp(t.submit_timestamp)
            tj.add_start_timestamp(t.start_timestamp)
            tj.add_end_timestamp(t.end_timestamp)

            t.job_details.append({
                'value': os.path.basename(t.config_file),
                'title': 'Config'
            })
            t.job_details.append({
                'url': build_url,
                'value': os.path.basename(build_url),
                'title': 'Build'
            })
            t.job_details.append({'value': utils.host(), 'title': 'Host'})

            if t.passed + t.failed + t.todo > 0:
                if t.failed == 0:
                    failed = '0'
                else:
                    failed = '<em class="testfail">%s</em>' % t.failed

                t.job_details.append({
                    'value': "%s/%s/%s" % (t.passed, failed, t.todo),
                    'title': "%s-%s" % (t.job_name, t.job_symbol)
                })

            if hasattr(t, 'phonedash_url'):
                t.job_details.append({
                    'url': t.phonedash_url,
                    'value': 'graph',
                    'title': 'phonedash'
                })

            # Attach log, ANRs, tombstones, etc.

            if self.s3_bucket:
                # We must make certain that S3 keys for uploaded files
                # are unique even in the event of retries. The
                # Treeherder logviewer limits the length of the log
                # url to 255 bytes. If the url length exceeds 255
                # characters it is truncated in the Treeherder
                # logviewer url field even though the file is
                # successfully uploaded to s3 with the full url. The
                # logviewer will fail to parse the log since it
                # attempts to retrieve it from a truncated url.

                # We have been creating unique keys through the use of
                # human readable "log_identifiers" combined with the
                # test's job_guid and base filename to create unique
                # keys for s3. Unfortunately, the choice of the aws
                # host name, a path based on the path to the build,
                # test names and config file names has resulted in
                # overly long urls which exceed 255 bytes. Given that
                # the s3 hostname and build url path currently consume
                # 100 bytes and the test's job-guid and filename
                # consume another 51, we only have a maximum of 104
                # bytes for the log_identifier. The safest course of
                # action is to eliminate the test name, test config
                # filename, the chunk and device name and rely solely
                # on the test's job_guid to provide uniqueness.

                log_identifier = t.job_guid

                key_prefix = os.path.dirname(urlparse.urlparse(build_url).path)
                key_prefix = re.sub('/tmp$', '', key_prefix)

                # Upload directory containing ANRs, tombstones and other items
                # to be uploaded.
                if t.upload_dir:
                    for f in utils.find_files(t.upload_dir):
                        try:
                            lname = os.path.relpath(f, t.upload_dir)
                            try:
                                fname = '%s-%s' % (log_identifier, lname)
                            except UnicodeDecodeError, e:
                                logger.exception(
                                    'Ignoring artifact %s',
                                    lname.decode('utf-8', errors='replace'))
                                continue
                            url = self.s3_bucket.upload(
                                f, "%s/%s" % (key_prefix, fname))
                            t.job_details.append({
                                'url': url,
                                'value': lname,
                                'title': 'artifact uploaded'
                            })
                        except (S3Error, IOError), e:
                            logger.exception('Error uploading artifact %s',
                                             fname)
                            t.job_details.append({
                                'value': 'Failed to upload artifact %s: %s' % (fname, e),
                                'title': 'Error'
                            })

                # Autophone Log
                # Since we are submitting results to Treeherder, we flush
                # the worker's log before uploading the log to
                # Treeherder. When we upload the log, it will contain
                # results for a single test run with possibly an error
                # message from the previous test if the previous log
                # upload failed.
                try:
                    # Emit the final step marker, flush and close the
                    # log prior to uploading.
                    t.worker_subprocess.log_step('Submitting Log')
                    t.worker_subprocess.close_log()
                    fname = '%s-autophone.log' % log_identifier
                    lname = 'Autophone Log'
                    key = "%s/%s" % (key_prefix, fname)
                    url = self.s3_bucket.upload(t.worker_subprocess.logfile,
                                                key)
                    # Truncate the log once it has been submitted to S3
                    # but do not close the filehandler as that messes with
                    # the next test's log.
                    t.worker_subprocess.filehandler.stream.truncate(0)
                    t.job_details.append({
                        'url': url,
                        'value': lname,
                        'title': 'artifact uploaded'
                    })
                    tj.add_log_reference('buildbot_text',
                                         url,
                                         parse_status='pending')
                except Exception as e:
                    logger.exception('Error %s uploading %s', e, fname)
                    t.job_details.append({
                        'value': 'Failed to upload Autophone log: %s' % e,
                        'title': 'Error'
                    })
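
The 255-byte budget described in the comment above can also be checked explicitly. A minimal sketch, assuming the s3_bucket.upload(path, key) helper used in the example returns the public url; the limit comes from the comment, everything else is illustrative:

MAX_LOGVIEWER_URL_BYTES = 255

def fits_logviewer(url):
    """True if the Treeherder logviewer can use the url as-is; the
    logviewer truncates log urls longer than 255 bytes and then fails
    to fetch the log from the truncated url."""
    return len(url.encode('utf-8')) <= MAX_LOGVIEWER_URL_BYTES

# Usage after an upload such as the ones above:
# url = self.s3_bucket.upload(f, '%s/%s' % (key_prefix, fname))
# if not fits_logviewer(url):
#     logger.warning('Log url exceeds the logviewer limit: %s', url)
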
Esempio n. 46
0
    def post_to_treeherder(self, tests):
        self.logger.info('\nTREEHERDER\n----------')
        version = mozversion.get_version(binary=self.bin,
                                         sources=self.sources,
                                         dm_type='adb',
                                         device_serial=self.device_serial)

        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get('device_id')
        if not device:
            self.logger.error('Submitting to Treeherder is currently limited '
                              'to devices.')
            return

        try:
            group = DEVICE_GROUP_MAP[device]
            job.add_group_name(group['name'])
            job.add_group_symbol(group['symbol'])
            job.add_job_name('Gaia Python Integration Test (%s)' % device)
            job.add_job_symbol('Gip')
        except KeyError:
            self.logger.error('Unknown device id: %s, unable to determine '
                              'Treeherder group. Supported device ids: %s' %
                              (device, DEVICE_GROUP_MAP.keys()))
            return

        # Determine revision hash from application revision
        revision = version['application_changeset']
        project = version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            self.treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' %
            (project, revision))
        self.logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result('testfailed')
        else:
            job.add_result('success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        # All B2G device builds are currently opt builds
        job.add_option_collection({'opt': True})

        # TODO: Add log reference
        # job.add_log_reference()

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   version.get('gaia_changeset'),
            'value': version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': version.get('device_firmware_date') and time.strftime(
                date_format, time.localtime(int(
                    version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': version.get('device_firmware_version_release')
        }]

        if self.ci_url:
            job_details.append({
                'url': self.ci_url,
                'value': self.ci_url,
                'content_type': 'link',
                'title': 'CI build:'
            })

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        # TODO: Add XML/HTML reports as artifacts
        # job.add_artifact()

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(protocol=url.scheme,
                                    host=url.hostname,
                                    project=project,
                                    oauth_key=self.treeherder_key,
                                    oauth_secret=self.treeherder_secret)
        self.logger.debug('Sending results to Treeherder: %s' %
                          job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug('Response: %s' % response.read())
        assert response.status == 200, 'Failed to send results!'
        self.logger.info(
            'Results are available to view at: %s' %
            (urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' %
                     (project, revision))))
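
Each of these examples resolves the application changeset to a Treeherder revision_hash through the revision-lookup endpoint and asserts on an empty response. A minimal sketch of the same lookup with explicit handling instead of the assert; the helper name is made up:

import requests
from urlparse import urljoin  # Python 2, as in the examples


def lookup_revision_hash(treeherder_url, project, revision):
    """Resolve a changeset to Treeherder's revision_hash, returning
    None instead of raising when the push has not been ingested yet."""
    lookup_url = urljoin(
        treeherder_url,
        'api/project/%s/revision-lookup/?revision=%s' % (project, revision))
    response = requests.get(lookup_url)
    response.raise_for_status()
    data = response.json()
    if revision not in data:
        return None
    return data[revision]['revision_hash']
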
Esempio n. 47
0
    def post_to_treeherder(self, tests):
        version = mozversion.get_version(
            binary=self.bin, sources=self.sources,
            dm_type='adb', device_serial=self.device_serial)

        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get('device_id')
        device_firmware_version_release = \
            version.get('device_firmware_version_release')

        if not device:
            self.logger.error('Submitting to Treeherder is currently limited '
                              'to devices.')
            return

        try:
            group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
            job.add_group_name(group['name'])
            job.add_group_symbol(group['symbol'])
            job.add_job_name('Gaia Python Integration Test (%s)' % group['symbol'])
            job.add_job_symbol('Gip')
        except KeyError:
            self.logger.error('Unknown device id: %s or device firmware '
                              'version: %s. Unable to determine Treeherder '
                              'group. Supported devices: %s'
                              % (device, device_firmware_version_release,
                                 ['%s: %s' % (k, [fw for fw in v.keys()])
                                  for k, v in DEVICE_GROUP_MAP.iteritems()]))
            return

        # Determine revision hash from application revision
        revision = version['application_changeset']
        project = version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            self.treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' % (
                project, revision))
        self.logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result('testfailed')
        else:
            job.add_result('success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        # All B2G device builds are currently opt builds
        job.add_option_collection({'opt': True})

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   version.get('gaia_changeset'),
            'value': version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': version.get('device_firmware_date') and time.strftime(
                date_format, time.localtime(int(
                    version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': version.get('device_firmware_version_release')
        }]

        ci_url = os.environ.get('BUILD_URL')
        if ci_url:
            job_details.append({
                'url': ci_url,
                'value': ci_url,
                'content_type': 'link',
                'title': 'CI build:'})

        # Attach logcat
        adb_device = ADBDevice(self.device_serial)
        with tempfile.NamedTemporaryFile(suffix='logcat.txt') as f:
            f.writelines(adb_device.get_logcat())
            f.flush()  # ensure buffered logcat data is on disk before upload
            self.logger.debug('Logcat stored in: %s' % f.name)
            try:
                url = self.upload_to_s3(f.name)
                job_details.append({
                    'url': url,
                    'value': 'logcat.txt',
                    'content_type': 'link',
                    'title': 'Log:'})
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload logcat.txt',
                    'content_type': 'text',
                    'title': 'Error:'})

        # Attach log files
        handlers = [handler for handler in self.logger.handlers
                    if isinstance(handler, StreamHandler) and
                    os.path.exists(handler.stream.name)]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Log:'})
                # Add log reference
                if type(handler.formatter) is TbplFormatter or \
                        type(handler.formatter) is LogLevelFilter and \
                        type(handler.formatter.inner) is TbplFormatter:
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'})

        # Attach reports
        for report in [self.html_output, self.xml_output]:
            if report is not None:
                filename = os.path.split(report)[-1]
                try:
                    url = self.upload_to_s3(report)
                    job_details.append({
                        'url': url,
                        'value': filename,
                        'content_type': 'link',
                        'title': 'Report:'})
                except S3UploadError:
                    job_details.append({
                        'value': 'Failed to upload %s' % filename,
                        'content_type': 'text',
                        'title': 'Error:'})

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get('TREEHERDER_KEY'),
            oauth_secret=os.environ.get('TREEHERDER_SECRET'))
        self.logger.debug('Sending results to Treeherder: %s' %
                          job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug('Response: %s' % response.read())
        assert response.status == 200, 'Failed to send results!'
        self.logger.info('Results are available to view at: %s' % (
            urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' % (
                project, revision))))
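
The log-reference condition in the example above relies on "and" binding more tightly than "or". A behavior-preserving restatement with explicit parentheses (Esempio n. 54 below formats it this way); the import paths follow the current mozlog layout, older releases used mozlog.structured.*:

from mozlog.formatters import TbplFormatter
from mozlog.handlers import LogLevelFilter


def uses_tbpl_formatter(handler):
    """Parenthesized restatement of the condition above: a log
    reference is added either for a bare TbplFormatter or for a
    LogLevelFilter wrapping one."""
    return (type(handler.formatter) is TbplFormatter
            or (type(handler.formatter) is LogLevelFilter
                and type(handler.formatter.inner) is TbplFormatter))
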
Esempio n. 48
0
    def post_to_treeherder(self, tests):
        version = mozversion.get_version(binary=self.bin,
                                         sources=self.sources,
                                         dm_type='adb',
                                         device_serial=self.device_serial)

        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get('device_id')
        device_firmware_version_release = \
            version.get('device_firmware_version_release')

        if not device:
            self.logger.error('Submitting to Treeherder is currently limited '
                              'to devices.')
            return

        try:
            group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
            job.add_group_name(group['name'])
            job.add_group_symbol(group['symbol'])
            job.add_job_name('Gaia Python Integration Test (%s)' %
                             group['symbol'])
            job.add_job_symbol('Gip')
        except KeyError:
            self.logger.error('Unknown device id: %s or device firmware '
                              'version: %s. Unable to determine Treeherder '
                              'group. Supported devices: %s' %
                              (device, device_firmware_version_release, [
                                  '%s: %s' % (k, [fw for fw in v.keys()])
                                  for k, v in DEVICE_GROUP_MAP.iteritems()
                              ]))
            return

        # Determine revision hash from application revision
        revision = version['application_changeset']
        project = version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            self.treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' %
            (project, revision))
        self.logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result('testfailed')
        else:
            job.add_result('success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        # All B2G device builds are currently opt builds
        job.add_option_collection({'opt': True})

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   version.get('gaia_changeset'),
            'value': version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': version.get('device_firmware_date') and time.strftime(
                date_format, time.localtime(int(
                    version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': version.get('device_firmware_version_release')
        }]

        ci_url = os.environ.get('BUILD_URL')
        if ci_url:
            job_details.append({
                'url': ci_url,
                'value': ci_url,
                'content_type': 'link',
                'title': 'CI build:'
            })

        # Attach logcat
        adb_device = ADBDevice(self.device_serial)
        with tempfile.NamedTemporaryFile(suffix='logcat.txt') as f:
            f.writelines(adb_device.get_logcat())
            f.flush()  # ensure buffered logcat data is on disk before upload
            self.logger.debug('Logcat stored in: %s' % f.name)
            try:
                url = self.upload_to_s3(f.name)
                job_details.append({
                    'url': url,
                    'value': 'logcat.txt',
                    'content_type': 'link',
                    'title': 'Log:'
                })
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload logcat.txt',
                    'content_type': 'text',
                    'title': 'Error:'
                })

        # Attach log files
        handlers = [
            handler for handler in self.logger.handlers
            if isinstance(handler, StreamHandler)
            and os.path.exists(handler.stream.name)
        ]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Log:'
                })
                # Add log reference
                if type(handler.formatter) is TbplFormatter or \
                        type(handler.formatter) is LogLevelFilter and \
                        type(handler.formatter.inner) is TbplFormatter:
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'
                })

        # Attach reports
        for report in [self.html_output, self.xml_output]:
            if report is not None:
                filename = os.path.split(report)[-1]
                try:
                    url = self.upload_to_s3(report)
                    job_details.append({
                        'url': url,
                        'value': filename,
                        'content_type': 'link',
                        'title': 'Report:'
                    })
                except S3UploadError:
                    job_details.append({
                        'value': 'Failed to upload %s' % filename,
                        'content_type': 'text',
                        'title': 'Error:'
                    })

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get('TREEHERDER_KEY'),
            oauth_secret=os.environ.get('TREEHERDER_SECRET'))
        self.logger.debug('Sending results to Treeherder: %s' %
                          job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug('Response: %s' % response.read())
        assert response.status == 200, 'Failed to send results!'
        self.logger.info(
            'Results are available to view at: %s' %
            (urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' %
                     (project, revision))))
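
None of the examples define DEVICE_GROUP_MAP, but its shape follows from how it is indexed: Esempio n. 46 and n. 49 key it by device id alone, while this family adds a second level keyed by the device firmware release. A purely illustrative sketch; the device id, firmware release and group details are made up:

# Illustrative only; real device ids, firmware releases and
# Treeherder group names/symbols will differ.
DEVICE_GROUP_MAP = {
    'flame': {
        'v18D': {
            'name': 'Flame KitKat Device Image',
            'symbol': 'Flame-KK',
        },
    },
}

group = DEVICE_GROUP_MAP['flame']['v18D']  # {'name': ..., 'symbol': ...}
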
Esempio n. 49
0
    def post_to_treeherder(self, tests):
        self.logger.info('\nTREEHERDER\n----------')
        version = mozversion.get_version(
            binary=self.bin, sources=self.sources,
            dm_type='adb', device_serial=self.device_serial)

        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get('device_id')
        if not device:
            self.logger.error('Submitting to Treeherder is currently limited '
                              'to devices.')
            return

        try:
            group = DEVICE_GROUP_MAP[device]
            job.add_group_name(group['name'])
            job.add_group_symbol(group['symbol'])
            job.add_job_name('Gaia Python Integration Test (%s)' % device)
            job.add_job_symbol('Gip')
        except KeyError:
            self.logger.error('Unknown device id: %s, unable to determine '
                              'Treeherder group. Supported device ids: %s' % (
                                  device, DEVICE_GROUP_MAP.keys()))
            return

        # Determine revision hash from application revision
        revision = version['application_changeset']
        project = version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            self.treeherder_url,
            'api/project/%s/revision-lookup/?revision=%s' % (
                project, revision))
        self.logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result('testfailed')
        else:
            job.add_result('success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        # All B2G device builds are currently opt builds
        job.add_option_collection({'opt': True})

        # TODO: Add log reference
        # job.add_log_reference()

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   version.get('gaia_changeset'),
            'value': version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': version.get('device_firmware_date') and time.strftime(
                date_format, time.localtime(int(
                    version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': version.get('device_firmware_version_release')
        }]

        if self.ci_url:
            job_details.append({
                'url': self.ci_url,
                'value': self.ci_url,
                'content_type': 'link',
                'title': 'CI build:'})

        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        # TODO: Add XML/HTML reports as artifacts
        # job.add_artifact()

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=self.treeherder_key,
            oauth_secret=self.treeherder_secret)
        self.logger.debug('Sending results to Treeherder: %s' %
                          job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug('Response: %s' % response.read())
        assert response.status == 200, 'Failed to send results!'
        self.logger.info('Results are available to view at: %s' % (
            urljoin(self.treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' % (
                project, revision))))
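
These examples submit through the OAuth-based TreeherderRequest. Later versions of the treeherder-client package replaced it with TreeherderClient and Hawk credentials; a minimal sketch of the equivalent submission under that assumption, with placeholder credentials:

from thclient import TreeherderClient

# Placeholder credentials; in the examples above the key and secret
# come from instance attributes or from the TREEHERDER_KEY and
# TREEHERDER_SECRET environment variables.
client = TreeherderClient(server_url='https://treeherder.mozilla.org',
                          client_id='my-client-id',
                          secret='my-secret')
client.post_collection(project, job_collection)
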
Esempio n. 50
0
    def post_to_treeherder(self, script, treeherder_url):
        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()
        job.add_group_name(self.device_properties['name'])
        job.add_group_symbol(self.device_properties['symbol'])
        job.add_job_name('Orangutan Monkey Script (%s)' %
                         self.device_properties.get('symbol'))
        job.add_job_symbol('Om')

        # Determine revision hash from application revision
        revision = self.version['application_changeset']
        project = self.version['application_repository'].split('/')[-1]
        lookup_url = urljoin(
            treeherder_url, 'api/project/%s/revision-lookup/?revision=%s' %
            (project, revision))
        self._logger.debug('Getting revision hash from: %s' % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), 'Unable to determine revision hash for %s. ' \
                                'Perhaps it has not been ingested by ' \
                                'Treeherder?' % revision
        revision_hash = response.json()[revision]['revision_hash']
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name('b2g')
        job.add_state('completed')
        job.add_result('testfailed' if self.runner.crashed else 'success')

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info('b2g', 'b2g-device-image', 'x86')
        job.add_machine_info('b2g', 'b2g-device-image', 'x86')

        if self.is_debug:
            job.add_option_collection({'debug': True})
        else:
            job.add_option_collection({'opt': True})

        date_format = '%d %b %Y %H:%M:%S'
        job_details = [{
            'content_type': 'link',
            'title': 'Gaia revision:',
            'url': 'https://github.com/mozilla-b2g/gaia/commit/%s' %
                   self.version.get('gaia_changeset'),
            'value': self.version.get('gaia_changeset'),
        }, {
            'content_type': 'text',
            'title': 'Gaia date:',
            'value': self.version.get('gaia_date') and time.strftime(
                date_format, time.localtime(int(
                    self.version.get('gaia_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device identifier:',
            'value': self.version.get('device_id')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (date):',
            'value': self.version.get('device_firmware_date') and
                time.strftime(date_format, time.localtime(int(
                    self.version.get('device_firmware_date')))),
        }, {
            'content_type': 'text',
            'title': 'Device firmware (incremental):',
            'value': self.version.get('device_firmware_version_incremental')
        }, {
            'content_type': 'text',
            'title': 'Device firmware (release):',
            'value': self.version.get('device_firmware_version_release')
        }]

        ci_url = os.environ.get('BUILD_URL')
        if ci_url:
            job_details.append({
                'url': ci_url,
                'value': ci_url,
                'content_type': 'link',
                'title': 'CI build:'
            })

        # Attach log files
        handlers = [
            handler for handler in self._logger.handlers
            if isinstance(handler, StreamHandler)
            and os.path.exists(handler.stream.name)
        ]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({
                    'url': url,
                    'value': filename,
                    'content_type': 'link',
                    'title': 'Log:'
                })
                # Add log reference
                if type(handler.formatter) is TbplFormatter or \
                        type(handler.formatter) is LogLevelFilter and \
                        type(handler.formatter.inner) is TbplFormatter:
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append({
                    'value': 'Failed to upload %s' % filename,
                    'content_type': 'text',
                    'title': 'Error:'
                })

        # Attach script
        filename = os.path.split(script)[-1]
        try:
            url = self.upload_to_s3(script)
            job_details.append({
                'url': url,
                'value': filename,
                'content_type': 'link',
                'title': 'Script:'
            })
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload %s' % filename,
                'content_type': 'text',
                'title': 'Error:'
            })

        # Attach logcat
        filename = '%s.log' % self.runner.device.dm._deviceSerial
        path = os.path.join(self.temp_dir, filename)
        try:
            url = self.upload_to_s3(path)
            job_details.append({
                'url': url,
                'value': filename,
                'content_type': 'link',
                'title': 'Logcat:'
            })
        except S3UploadError:
            job_details.append({
                'value': 'Failed to upload %s' % filename,
                'content_type': 'text',
                'title': 'Error:'
            })

        # Attach crash dumps
        if self.runner.crashed:
            crash_dumps = os.listdir(self.crash_dumps_path)
            for filename in crash_dumps:
                path = os.path.join(self.crash_dumps_path, filename)
                try:
                    url = self.upload_to_s3(path)
                    job_details.append({
                        'url': url,
                        'value': filename,
                        'content_type': 'link',
                        'title': 'Crash:'
                    })
                except S3UploadError:
                    job_details.append({
                        'value': 'Failed to upload %s' % filename,
                        'content_type': 'text',
                        'title': 'Error:'
                    })

        # Add the job details artifact only after every entry, including
        # the crash dumps above, has been collected; the original order
        # relied on the artifact keeping a live reference to the list.
        if job_details:
            job.add_artifact('Job Info', 'json', {'job_details': job_details})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get('TREEHERDER_KEY'),
            oauth_secret=os.environ.get('TREEHERDER_SECRET'))
        self._logger.info('Sending results to Treeherder: %s' % treeherder_url)
        self._logger.debug('Job collection: %s' % job_collection.to_json())
        response = request.post(job_collection)
        if response.status == 200:
            self._logger.debug('Response: %s' % response.read())
            self._logger.info(
                'Results are available to view at: %s' % (urljoin(
                    treeherder_url, '/ui/#/jobs?repo=%s&revision=%s' %
                    (project, revision))))
        else:
            self._logger.error('Failed to send results to Treeherder! '
                               'Response: %s' % response.read())
Esempio n. 51
0
def main():
    result_revision_hash = create_revision_hash()

    trsc = TreeherderResultSetCollection()

    trs = trsc.get_resultset()

    # self.required_properties = {
    #     'revision_hash':{ 'len':50, 'cb':self.validate_existence },
    #     'revisions':{ 'type':list, 'cb':self.validate_existence },
    #     'author':{ 'len':150, 'cb':self.validate_existence }
    #     }

    trs.add_revision_hash(result_revision_hash)
    trs.add_author('WebRTC QA Tests')
    trs.add_push_timestamp(int(time.time()))

    tr = trs.get_revision()

    # self.required_properties = {
    #     'revision':{ 'len':50, 'cb':self.validate_existence },
    #     'repository':{ 'cb':self.validate_existence },
    #     'files':{ 'type':list, 'cb':self.validate_existence },
    #     }

    tr.add_revision(create_revision_hash()[:12])
    tr.add_author('Firefox Nightly')
    tr.add_comment('firefox-33.0a1.en-US')
    tr.add_files(['firefox-33.0a1.en-US.linux-i686.tar.bz2',
                  'firefox-33.0a1.en-US.linux-x86_64.tests.zip'])
    tr.add_repository(
        'ftp://ftp.mozilla.org/pub/firefox/nightly/latest-mozilla-central/')
    trs.add_revision(tr)

    trsc.add(trs)

    tjc = TreeherderJobCollection()
    tj = tjc.get_job()

    # self.required_properties = {
    #     'revision_hash':{ 'len':50, 'cb':self.validate_existence },
    #     'project':{ 'cb':self.validate_existence },
    #     'job':{ 'type':dict, 'cb':self.validate_existence },
    #     'job.job_guid':{ 'len':50, 'cb':self.validate_existence }
    # }

    tj.add_revision_hash(result_revision_hash)
    tj.add_project('qa-try')
    tj.add_job_guid(str(uuid.uuid4()))

    tj.add_build_info('linux', 'linux64', 'x86_64')
    tj.add_description('WebRTC Sunny Day')
    tj.add_machine_info('linux', 'linux64', 'x86_64')
    tj.add_end_timestamp(int(time.time()) - 5)
    tj.add_start_timestamp(int(time.time()) - 3600 * 3 - 5)
    tj.add_submit_timestamp(int(time.time()) - 3600 * 3 - 10)
    tj.add_state('completed')
    tj.add_machine('webrtc-server')
    tj.add_option_collection({'opt': True})  # must not be {}!
    tj.add_reason('testing')
    tj.add_result('success')  # must be success/testfailed/busted
    tj.add_who('*****@*****.**')
    tj.add_group_name('WebRTC QA Tests')
    tj.add_group_symbol('WebRTC')
    tj.add_job_symbol('end')
    tj.add_job_name('Endurance')

    tj.add_artifact('Job Info', 'json', {
        "job_details": [
            {
                'title': 'Iterations:',
                'value': '10782',
                'content_type': 'text'
            },
            {
                'title': 'Errors:',
                'value': '5',
                'content_type': 'text'
            },
            {
                'title': 'Longest Pass Duration:',
                'value': '2:58:36.5',
                'content_type': 'text'
            }
        ],
    })

    tjc.add(tj)

    key, secret = get_oauth_creds()
    project, host = get_repo_details()

    req = TreeherderRequest(
        protocol='http',
        host=host,
        project=project,
        oauth_key=key,
        oauth_secret=secret
    )

    print('trsc = ' + json.dumps(json.loads(trsc.to_json()), sort_keys=True,
                                 indent=4, separators=(',', ': ')))

    print('tjc = ' + json.dumps(json.loads(tjc.to_json()), sort_keys=True,
                                indent=4, separators=(',', ': ')))

    # print 'req.oauth_key = ' + req.oauth_key
    # print 'req.oauth_secret = ' + req.oauth_secret

    # uri = req.get_uri(trsc)
    # print 'req.get_uri() = ' + uri
    # print 'req.oauth_client.get_signed_uri() = ' +
    # req.oauth_client.get_signed_uri(trsc.to_json(), uri)

    req.post(trsc)
    req.post(tjc)
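
The example calls a create_revision_hash() helper that is not shown. A hypothetical stand-in: the required_properties comments above cap the field at 50 characters, and a random SHA-1 hex digest (40 characters) satisfies that while looking like a changeset id:

import hashlib
import os


def create_revision_hash():
    """Hypothetical stand-in for the undefined helper above: any
    unique hex string of at most 50 characters will do for a
    synthetic result set."""
    return hashlib.sha1(os.urandom(16)).hexdigest()
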
Esempio n. 52
0
    def submit_complete(self, jobs):
        """ Submit results to Treeherder, including uploading logs.
        All jobs are submitted to the same project in one
        TreeherderJobCollection.

        :param jobs: list of jobs (TestJob).
        """
        self.logger.debug(type(self).__name__ +
                          '.submit_complete: jobs =\n%s' % jobs)
        if not self.url or not jobs:
            self.logger.debug(type(self).__name__ +
                              '.submit_complete: no url/job')
            return

        tjc = TreeherderJobCollection()

        for j in jobs:
            project = j.build['repo']
            revision = j.build['revision']
            revision_hash = self.request_revision_hash(project, revision)
            if not revision_hash:
                self.logger.debug(type(self).__name__ +
                                  '.submit_complete: no revision hash')
                return
            self.logger.debug(type(self).__name__ + '.submit_complete '
                              'for %s %s' % (j.name, project))
            j.end_timestamp = timestamp_now()
            # A usercancelled job may not have a start_timestamp
            # since it may have been cancelled before it started.
            if not j.start_timestamp:
                j.start_timestamp = j.end_timestamp
            # If a 'pending' submission was never made for this job,
            # the submit_timestamp may be blank.
            if not j.submit_timestamp:
                j.submit_timestamp = j.end_timestamp

            if j.test_result:
                if j.test_result.failed == 0:
                    failed = '0'
                else:
                    failed = ('<em class="testfail">%s</em>'
                              % j.test_result.failed)

                j.job_details.append({
                    'value': "%s/%s/%s" % (j.test_result.passed,
                                           failed, j.test_result.todo),
                    'content_type': 'raw_html',
                    'title': "%s-%s (pass/fail/todo)" % (j.job_name,
                                                         j.job_symbol)
                })

            tj = tjc.get_job()
            tj.add_tier(self.tier)
            tj.add_description(j.description)
            tj.add_reason(j.reason)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_who(j.who)
            # Note: job_guid should be added before artifacts.
            tj.add_job_guid(j.job_guid)
            tj.add_job_name(j.job_name)
            tj.add_job_symbol(j.job_symbol)
            tj.add_group_name(j.group_name)
            tj.add_group_symbol(j.group_symbol)
            tj.add_product_name(j.build['product'])
            tj.add_state(JobState.COMPLETED)
            tj.add_result(j.result)
            tj.add_submit_timestamp(j.submit_timestamp)
            tj.add_start_timestamp(j.start_timestamp)
            tj.add_end_timestamp(j.end_timestamp)
            tj.add_build_info(j.build['os_name'],
                              j.build['platform'],
                              j.build['architecture'])
            tj.add_machine(j.machine['host'])
            tj.add_machine_info(j.machine['os_name'],
                                j.machine['platform'],
                                j.machine['architecture'])
            tj.add_option_collection({'opt': True})

            # Job details and other artifacts

            # Create/add text_log_summary for each log that should be parsed
            def build_log_artifacts(log_file, log_url):
                log_name = os.path.basename(log_file)
                if (not log_url) or (log_file not in j.parsed_logs):
                    return
                tj.add_log_reference(log_name, log_url, parse_status='parsed')
                # NOTE must have started_linenumber < finished_linenumber
                text_log_summary = parse_log(log_file, log_url, self.logger)
                tj.add_artifact('text_log_summary', 'json',
                                json.dumps(text_log_summary))
                self.logger.debug(type(self).__name__ +
                                  '.submit_complete text_log_summary: %s' %
                                  pretty(text_log_summary))

            # File uploads
            if self.s3_bucket:
                prefix = j.unique_s3_prefix
                filepaths = j.log_files + j.config_files
                for path in filepaths:
                    url = upload_file(self.s3_bucket, prefix,
                                      path, self.logger, j)
                    build_log_artifacts(path, url)
                if j.upload_dir:
                    for f in glob.glob(os.path.join(j.upload_dir, '*')):
                        url = upload_file(self.s3_bucket, prefix, f,
                                          self.logger, j)
                        build_log_artifacts(f, url)
            tj.add_artifact('Job Info', 'json', {'job_details': j.job_details})
            for a in j.artifacts:
                tj.add_artifact(*a)

            tjc.add(tj)

            message = j.message
            if j.test_result:
                message += '\nTestResult: %s %s' % (j.test_result.status,
                                                    j.name)
            if message:
                self.logger.info(message)

        self.post_request(project, tjc, j.job_guid)
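
The parse_log helper called above is not shown; the next example (Esempio n. 53) builds the same text_log_summary structure inline, which pins down the expected return shape. A minimal sketch under that assumption; the error markers are illustrative:

import os


def parse_log(log_file, log_url, logger):
    """Sketch of a parse_log compatible with the call above: collect
    candidate error lines and wrap them in the text_log_summary shape
    that Esempio n. 53 builds inline."""
    errors = []
    with open(log_file) as f:
        for number, line in enumerate(f, 1):
            # Illustrative markers; a real parser would match the
            # failure lines emitted by the harness in use.
            if 'TEST-UNEXPECTED' in line or 'PROCESS-CRASH' in line:
                errors.append({'line': line.rstrip(), 'linenumber': number})
    return {
        'step_data': {'all_errors': errors, 'errors_truncated': False},
        'logurl': log_url,
        'logname': os.path.basename(log_file),
    }
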
Esempio n. 53
0
    def submit_complete(self, jobs):
        """ Submit results to Treeherder, including uploading logs.
        All jobs are submitted to the same project in one
        TreeherderJobCollection.

        :param jobs: list of jobs (TestJob).
        """
        self.logger.debug(type(self).__name__ +
                          '.submit_complete: jobs =\n%s' % jobs)
        if not self.url or not jobs:
            self.logger.debug(type(self).__name__ +
                              '.submit_complete: no url/job')
            return

        tjc = TreeherderJobCollection()

        for j in jobs:
            project = j.build['repo']
            revision = j.build['revision']
            revision_hash = self.request_revision_hash(project, revision)
            if not revision_hash:
                self.logger.debug(type(self).__name__ +
                                  '.submit_complete: no revision hash')
                return
            self.logger.debug(type(self).__name__ + '.submit_complete '
                              'for %s %s' % (j.name, project))
            j.end_timestamp = timestamp_now()
            # A usercancelled job may not have a start_timestamp
            # since it may have been cancelled before it started.
            if not j.start_timestamp:
                j.start_timestamp = j.end_timestamp
            # If a 'pending' submission was never made for this job,
            # the submit_timestamp may be blank.
            if not j.submit_timestamp:
                j.submit_timestamp = j.end_timestamp

            if j.test_result:
                if j.test_result.failed == 0:
                    failed = '0'
                else:
                    failed = ('<em class="testfail">%s</em>'
                              % j.test_result.failed)

                j.job_details.append({
                    'value': "%s/%s/%s" % (j.test_result.passed,
                                           failed, j.test_result.todo),
                    'content_type': 'raw_html',
                    'title': "%s-%s (pass/fail/todo)" % (j.job_name,
                                                         j.job_symbol)
                })

            tj = tjc.get_job()
            tj.add_tier(self.tier)
            tj.add_description(j.description)
            tj.add_reason(j.reason)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_who(j.who)
            # Note: job_guid should be added before artifacts.
            tj.add_job_guid(j.job_guid)
            tj.add_job_name(j.job_name)
            tj.add_job_symbol(j.job_symbol)
            tj.add_group_name(j.group_name)
            tj.add_group_symbol(j.group_symbol)
            tj.add_product_name(j.build['product'])
            tj.add_state(JobState.COMPLETED)
            tj.add_result(j.result)
            tj.add_submit_timestamp(j.submit_timestamp)
            tj.add_start_timestamp(j.start_timestamp)
            tj.add_end_timestamp(j.end_timestamp)
            tj.add_build_info(j.build['os_name'],
                              j.build['platform'],
                              j.build['architecture'])
            tj.add_machine(j.machine['host'])
            tj.add_machine_info(j.machine['os_name'],
                                j.machine['platform'],
                                j.machine['architecture'])
            tj.add_option_collection({'opt': True})

            # Job details and other artifacts

            # Add text_log_summary for each parsed log
            def process_parsed_log(log_file, log_url):
                log_name = os.path.basename(log_file)
                if (not log_url) or (log_file not in j.parsed_logs):
                    return
                error_lines = [{'line': line, 'linenumber': None}
                               for line in j.parsed_logs[log_file]]
                tj.add_log_reference(log_name, log_url, parse_status='parsed')
                # NOTE must have started_linenumber < finished_linenumber
                text_log_summary = {
                    'step_data': {
                        'all_errors': error_lines,
                        # 'steps': [
                        #     {
                        #         'name': 'step',
                        #         'started_linenumber': None,
                        #         'finished_linenumber': None,
                        #         'duration': j.end_timestamp - j.start_timestamp,
                        #         'finished': '%s' % datetime.datetime.fromtimestamp(j.end_timestamp),
                        #         'errors': error_lines,
                        #         'error_count': len(error_lines),
                        #         'order': 0,
                        #         'result': j.result
                        #     },
                        # ],
                        'errors_truncated': False
                    },
                    'logurl': log_url,
                    'logname': log_name
                }
                tj.add_artifact('text_log_summary', 'json',
                                json.dumps(text_log_summary))
                self.logger.debug(type(self).__name__ +
                                  '.submit_complete text_log_summary: %s' %
                                  pretty(text_log_summary))

            # File uploads
            if self.s3_bucket:
                prefix = j.unique_s3_prefix
                filepaths = j.log_files + j.config_files
                for path in filepaths:
                    url = upload_file(self.s3_bucket, prefix,
                                      path, self.logger, j)
                    process_parsed_log(path, url)
                if j.upload_dir:
                    for f in glob.glob(os.path.join(j.upload_dir, '*')):
                        url = upload_file(self.s3_bucket, prefix, f,
                                          self.logger, j)
                        process_parsed_log(f, url)
            tj.add_artifact('Job Info', 'json', {'job_details': j.job_details})
            for a in j.artifacts:
                tj.add_artifact(*a)

            tjc.add(tj)

            message = j.message
            if j.test_result:
                message += '\nTestResult: %s %s' % (j.test_result.status,
                                                    j.name)
            if message:
                self.logger.info(message)

        self.post_request(project, tjc, j.job_guid)
Esempio n. 54
0
    def post_to_treeherder(self, tests):
        version = mozversion.get_version(
            binary=self.bin, sources=self.sources, dm_type="adb", device_serial=self.device_serial
        )

        job_collection = TreeherderJobCollection()
        job = job_collection.get_job()

        device = version.get("device_id")
        device_firmware_version_release = version.get("device_firmware_version_release")

        if not device:
            self.logger.error("Submitting to Treeherder is currently limited " "to devices.")
            return

        try:
            group = DEVICE_GROUP_MAP[device][device_firmware_version_release]
            job.add_group_name(group["name"])
            job.add_group_symbol(group["symbol"])
            job.add_job_name("Gaia Python Integration Test (%s)" % group["symbol"])
            job.add_job_symbol("Gip")
        except KeyError:
            self.logger.error(
                "Unknown device id: %s or device firmware "
                "version: %s. Unable to determine Treeherder "
                "group. Supported devices: %s"
                % (
                    device,
                    device_firmware_version_release,
                    ["%s: %s" % (k, [fw for fw in v.keys()]) for k, v in DEVICE_GROUP_MAP.iteritems()],
                )
            )
            return

        # Determine revision hash from application revision
        revision = version["application_changeset"]
        project = version["application_repository"].split("/")[-1]
        lookup_url = urljoin(self.treeherder_url, "api/project/%s/revision-lookup/?revision=%s" % (project, revision))
        self.logger.debug("Getting revision hash from: %s" % lookup_url)
        response = requests.get(lookup_url)
        response.raise_for_status()
        assert response.json(), (
            "Unable to determine revision hash for %s. " "Perhaps it has not been ingested by " "Treeherder?" % revision
        )
        revision_hash = response.json()[revision]["revision_hash"]
        job.add_revision_hash(revision_hash)
        job.add_project(project)
        job.add_job_guid(str(uuid.uuid4()))
        job.add_product_name("b2g")
        job.add_state("completed")

        # Determine test result
        if self.failed or self.unexpected_successes:
            job.add_result("testfailed")
        else:
            job.add_result("success")

        job.add_submit_timestamp(int(self.start_time))
        job.add_start_timestamp(int(self.start_time))
        job.add_end_timestamp(int(self.end_time))

        job.add_machine(socket.gethostname())
        job.add_build_info("b2g", "b2g-device-image", "x86")
        job.add_machine_info("b2g", "b2g-device-image", "x86")

        # All B2G device builds are currently opt builds
        job.add_option_collection({"opt": True})

        date_format = "%d %b %Y %H:%M:%S"
        job_details = [
            {
                "content_type": "link",
                "title": "Gaia revision:",
                "url": "https://github.com/mozilla-b2g/gaia/commit/%s" % version.get("gaia_changeset"),
                "value": version.get("gaia_changeset"),
            },
            {
                "content_type": "text",
                "title": "Gaia date:",
                "value": version.get("gaia_date")
                and time.strftime(date_format, time.localtime(int(version.get("gaia_date")))),
            },
            {"content_type": "text", "title": "Device identifier:", "value": version.get("device_id")},
            {
                "content_type": "text",
                "title": "Device firmware (date):",
                "value": version.get("device_firmware_date")
                and time.strftime(date_format, time.localtime(int(version.get("device_firmware_date")))),
            },
            {
                "content_type": "text",
                "title": "Device firmware (incremental):",
                "value": version.get("device_firmware_version_incremental"),
            },
            {
                "content_type": "text",
                "title": "Device firmware (release):",
                "value": version.get("device_firmware_version_release"),
            },
        ]

        ci_url = os.environ.get("BUILD_URL")
        if ci_url:
            job_details.append({"url": ci_url, "value": ci_url, "content_type": "link", "title": "CI build:"})

        # Attach logcat
        adb_device = ADBDevice(self.device_serial)
        with tempfile.NamedTemporaryFile(suffix="logcat.txt") as f:
            f.writelines(adb_device.get_logcat())
            f.flush()  # ensure the logcat is on disk before uploading by name
            self.logger.debug("Logcat stored in: %s" % f.name)
            try:
                url = self.upload_to_s3(f.name)
                job_details.append({"url": url, "value": "logcat.txt", "content_type": "link", "title": "Log:"})
            except S3UploadError:
                job_details.append({"value": "Failed to upload logcat.txt", "content_type": "text", "title": "Error:"})

        # Attach log files
        handlers = [
            handler
            for handler in self.logger.handlers
            if isinstance(handler, StreamHandler) and os.path.exists(handler.stream.name)
        ]
        for handler in handlers:
            path = handler.stream.name
            filename = os.path.split(path)[-1]
            try:
                url = self.upload_to_s3(path)
                job_details.append({"url": url, "value": filename, "content_type": "link", "title": "Log:"})
                # Add log reference
                if (
                    type(handler.formatter) is TbplFormatter
                    or (type(handler.formatter) is LogLevelFilter
                        and type(handler.formatter.inner) is TbplFormatter)
                ):
                    job.add_log_reference(filename, url)
            except S3UploadError:
                job_details.append(
                    {"value": "Failed to upload %s" % filename, "content_type": "text", "title": "Error:"}
                )

        # Attach reports
        for report in [self.html_output]:
            if report is not None:
                filename = os.path.split(report)[-1]
                try:
                    url = self.upload_to_s3(report)
                    job_details.append({"url": url, "value": filename, "content_type": "link", "title": "Report:"})
                except S3UploadError:
                    job_details.append(
                        {"value": "Failed to upload %s" % filename, "content_type": "text", "title": "Error:"}
                    )

        if job_details:
            job.add_artifact("Job Info", "json", {"job_details": job_details})

        job_collection.add(job)

        # Send the collection to Treeherder
        url = urlparse(self.treeherder_url)
        request = TreeherderRequest(
            protocol=url.scheme,
            host=url.hostname,
            project=project,
            oauth_key=os.environ.get("TREEHERDER_KEY"),
            oauth_secret=os.environ.get("TREEHERDER_SECRET"),
        )
        self.logger.debug("Sending results to Treeherder: %s" % job_collection.to_json())
        response = request.post(job_collection)
        self.logger.debug("Response: %s" % response.read())
        assert response.status == 200, "Failed to send results!"
        self.logger.info(
            "Results are available to view at: %s"
            % (urljoin(self.treeherder_url, "/ui/#/jobs?repo=%s&revision=%s" % (project, revision)))
        )
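
For reference, a minimal sketch of the revision-lookup call the example above depends on. The URL layout mirrors the code; the server, project and revision values are hypothetical placeholders:

import requests
from urlparse import urljoin  # Python 2, as in the examples

treeherder_url = 'https://treeherder.mozilla.org/'  # placeholder
project = 'mozilla-central'                         # placeholder
revision = '40a6aaa3b57c'                           # hypothetical changeset id

lookup_url = urljoin(treeherder_url,
                     'api/project/%s/revision-lookup/?revision=%s'
                     % (project, revision))
response = requests.get(lookup_url)
response.raise_for_status()
# The example assumes the endpoint maps each revision to details that
# include its 'revision_hash'.
revision_hash = response.json()[revision]['revision_hash']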
Example n. 55
    def submit_complete(self,
                        machine,
                        build_url,
                        project,
                        revision,
                        build_type,
                        build_abi,
                        build_platform,
                        build_sdk,
                        builder_type,
                        tests=None):
        """Submit test results for the worker's current job to Treeherder.

        :param machine: machine id
        :param build_url: url to build being tested.
        :param project: repository of build.
        :param revision: Either a URL to the changeset or the revision id.
        :param tests: Lists of tests to be reported.
        """
        LOGGER.debug('AutophoneTreeherder.submit_complete: %s', tests)

        if not self.url or not revision:
            LOGGER.debug(
                'AutophoneTreeherder.submit_complete: no url/revision')
            return

        tjc = TreeherderJobCollection()

        tests = tests or []  # the default of None is not iterable
        for t in tests:
            LOGGER.debug('AutophoneTreeherder.submit_complete for %s %s',
                         t.name, project)

            t.end_timestamp = timestamp_now()
            # A usercancelled job may not have a start_timestamp
            # since it may have been cancelled before it started.
            if not t.start_timestamp:
                t.start_timestamp = t.end_timestamp

            tj = self._create_job(tjc, machine, build_url, project, revision,
                                  build_type, build_abi, build_platform,
                                  build_sdk, builder_type, t)
            tj.add_state(TestState.COMPLETED)
            tj.add_result(t.status)
            tj.add_submit_timestamp(t.submit_timestamp)
            tj.add_start_timestamp(t.start_timestamp)
            tj.add_end_timestamp(t.end_timestamp)

            t.job_details.append({
                'value': os.path.basename(t.config_file),
                'title': 'Config'
            })
            t.job_details.append({
                'url': build_url,
                'value': os.path.basename(build_url),
                'title': 'Build'
            })
            t.job_details.append({'value': utils.host(), 'title': 'Host'})

            if t.passed + t.failed + t.todo > 0:
                if t.failed == 0:
                    failed = '0'
                else:
                    failed = '<em class="testfail">%s</em>' % t.failed

                t.job_details.append({
                    'value': '%s/%s/%s' % (t.passed, failed, t.todo),
                    'title': '%s-%s' % (t.job_name, t.job_symbol)
                })

            if hasattr(t, 'phonedash_url'):
                t.job_details.append({
                    'url': t.phonedash_url,
                    'value': 'graph',
                    'title': 'phonedash'
                })

            # Attach logs, ANRs, tombstones, etc.

            logurl = None
            if self.s3_bucket:
                # We must make certain that S3 keys for uploaded files
                # are unique. We can create a unique log_identifier as
                # follows: For Unittests, t.unittest_logpath's
                # basename contains a unique name based on the actual
                # Unittest name, chunk and machine id. For
                # Non-Unittests, the test classname, chunk and machine
                # id can be used.

                if t.unittest_logpath:
                    log_identifier = os.path.splitext(
                        os.path.basename(t.unittest_logpath))[0]
                else:
                    log_identifier = "%s-%s-%s-%s" % (
                        t.name, os.path.basename(
                            t.config_file), t.chunk, machine)
                # We must make certain the key is unique even in the
                # event of retries.
                log_identifier = '%s-%s' % (log_identifier, t.job_guid)

                key_prefix = os.path.dirname(urlparse.urlparse(build_url).path)
                key_prefix = re.sub('/tmp$', '', key_prefix)

                # Logcat
                fname = '%s-logcat.log' % log_identifier
                lname = 'logcat'
                key = "%s/%s" % (key_prefix, fname)
                with tempfile.NamedTemporaryFile(suffix='logcat.txt') as f:
                    try:
                        if self.worker.is_ok():
                            for line in t.worker_subprocess.logcat.get(
                                    full=True):
                                f.write('%s\n' %
                                        line.encode('UTF-8', errors='replace'))
                            t.worker_subprocess.logcat.reset()
                        else:
                            # Device is in an error state so we can't
                            # get the full logcat but we can output
                            # any logcat output we accumulated
                            # previously.
                            for line in t.worker_subprocess.logcat._accumulated_logcat:
                                f.write('%s\n' %
                                        line.encode('UTF-8', errors='replace'))
                        f.flush()
                    except Exception, e:
                        LOGGER.exception('Error reading logcat %s', fname)
                        t.job_details.append({
                            'value': 'Failed to read %s: %s' % (fname, e),
                            'title': 'Error'
                        })
                    try:
                        url = self.s3_bucket.upload(f.name, key)
                        t.job_details.append({
                            'url': url,
                            'value': lname,
                            'title': 'artifact uploaded'
                        })
                    except S3Error, e:
                        LOGGER.exception('Error uploading logcat %s', fname)
                        t.job_details.append({
                            'value': 'Failed to upload %s: %s' % (fname, e),
                            'title': 'Error'
                        })
                # Upload directory containing ANRs, tombstones and other items
                # to be uploaded.
                if t.upload_dir:
                    for f in glob.glob(os.path.join(t.upload_dir, '*')):
                        try:
                            lname = os.path.basename(f)
                            try:
                                fname = '%s-%s' % (log_identifier, lname)
                            except UnicodeDecodeError, e:
                                LOGGER.exception(
                                    'Ignoring artifact %s',
                                    lname.decode('utf-8', errors='replace'))
                                continue
                            url = self.s3_bucket.upload(
                                f, "%s/%s" % (key_prefix, fname))
                            t.job_details.append({
                                'url': url,
                                'value': lname,
                                'title': 'artifact uploaded'
                            })
                        except S3Error, e:
                            LOGGER.exception('Error uploading artifact %s',
                                             fname)
                            t.job_details.append({
                                'value': 'Failed to upload artifact %s: %s'
                                         % (fname, e),
                                'title': 'Error'
                            })
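
A minimal sketch of the S3 key scheme constructed above, assuming hypothetical build and identifier values; the dirname, '/tmp' strip and suffix steps mirror the code:

import os
import re
import urlparse  # Python 2 module, as used above

build_url = 'https://example.com/pub/mobile/tinderbox-builds/tmp/build.apk'
log_identifier = 'webappstartup-s1s2-1-nexus-5'  # name-config-chunk-machine
job_guid = 'abcd1234-guid'                       # hypothetical

log_identifier = '%s-%s' % (log_identifier, job_guid)  # unique across retries
key_prefix = os.path.dirname(urlparse.urlparse(build_url).path)
key_prefix = re.sub('/tmp$', '', key_prefix)
key = '%s/%s-logcat.log' % (key_prefix, log_identifier)
# key == '/pub/mobile/tinderbox-builds/webappstartup-s1s2-1-nexus-5-abcd1234-guid-logcat.log'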
Example n. 56
    def transform(self, data):
        """
        Transform the builds4h structure into something we can ingest via
        our RESTful API.
        """
        revisions = defaultdict(list)
        missing_resultsets = defaultdict(set)

        projects = set(x.project for x in Datasource.objects.cached())

        for build in data['builds']:
            prop = build['properties']

            if 'branch' not in prop:
                logger.warning("property 'branch' not found in build4h")
                continue

            if prop['branch'] not in projects:
                logger.warning("skipping job on unsupported branch {0}".format(
                    prop['branch']))
                continue

            prop['revision'] = prop.get(
                'revision',
                prop.get('got_revision', prop.get('sourcestamp', None)))

            if not prop['revision']:
                logger.warning("property 'revision' not found in build4h")
                continue

            prop['revision'] = prop['revision'][0:12]
            revisions[prop['branch']].append(prop['revision'])

        revisions_lookup = common.lookup_revisions(revisions)

        # Holds one collection per unique branch/project
        th_collections = {}

        for build in data['builds']:
            try:
                prop = build['properties']
                project = prop['branch']
                artifact_build = copy.deepcopy(build)
                resultset = common.get_resultset(project, revisions_lookup,
                                                 prop['revision'],
                                                 missing_resultsets, logger)
            except KeyError:
                # skip this job, at least at this point
                continue

            treeherder_data = {
                'revision_hash': resultset['revision_hash'],
                'resultset_id': resultset['id'],
                'project': project,
                'coalesced': []
            }

            platform_info = buildbot.extract_platform_info(prop['buildername'])
            job_name_info = buildbot.extract_name_info(prop['buildername'])

            device_name = buildbot.get_device_or_unknown(
                job_name_info.get('name', ''), platform_info['vm'])

            if 'log_url' in prop:
                log_reference = [{'url': prop['log_url'], 'name': 'builds-4h'}]
            else:
                log_reference = []

            # request_id and request_time are mandatory
            # and they can be found in a couple of different places
            try:
                job_guid_data = self.find_job_guid(build)
                request_ids = build['properties'].get('request_ids',
                                                      build['request_ids'])
            except KeyError:
                continue

            treeherder_data['coalesced'] = job_guid_data['coalesced']

            def prop_remove(field):
                try:
                    del artifact_build['properties'][field]
                except KeyError:
                    pass

            prop_remove("product")
            prop_remove("project")
            prop_remove("buildername")
            prop_remove("slavename")
            prop_remove("build_url")
            prop_remove("log_url")
            prop_remove("slavebuilddir")
            prop_remove("branch")
            prop_remove("repository")
            prop_remove("revision")

            del artifact_build['requesttime']
            del artifact_build['starttime']
            del artifact_build['endtime']

            job = {
                'job_guid': job_guid_data['job_guid'],
                'name': job_name_info.get('name', ''),
                'job_symbol': job_name_info.get('job_symbol', ''),
                'group_name': job_name_info.get('group_name', ''),
                'group_symbol': job_name_info.get('group_symbol', ''),
                'reference_data_name': prop['buildername'],
                'product_name': prop.get('product', ''),
                'state': 'completed',
                'result': buildbot.RESULT_DICT[build['result']],
                'reason': build['reason'],
                # fall back to the scheduler if the 'who' property is absent
                'who': prop.get('who', prop.get('scheduler', '')),
                'submit_timestamp': build['requesttime'],
                'start_timestamp': build['starttime'],
                'end_timestamp': build['endtime'],
                'machine': prop.get('slavename', 'unknown'),
                # build_url is not present in all builds
                'build_url': prop.get('build_url', ''),
                # build_platform is the same as machine_platform
                'build_platform': {
                    # platform attributes sometimes parse without results
                    'os_name': platform_info.get('os', ''),
                    'platform': platform_info.get('os_platform', ''),
                    'architecture': platform_info.get('arch', '')
                },
                'machine_platform': {
                    'os_name': platform_info.get('os', ''),
                    'platform': platform_info.get('os_platform', ''),
                    'architecture': platform_info.get('arch', '')
                },
                'device_name': device_name,
                # pgo or non-pgo, dependent on buildername parsing
                'option_collection': {
                    buildbot.extract_build_type(prop['buildername']): True
                },
                'log_references': log_reference,
                'artifacts': [
                    {
                        'type': 'json',
                        'name': 'buildapi_complete',
                        'log_urls': [],
                        'blob': artifact_build
                    },
                    {
                        'type': 'json',
                        'name': 'buildapi',
                        'log_urls': [],
                        'blob': {
                            'buildername': build['properties']['buildername'],
                            'request_id': max(request_ids)
                        }
                    },
                ]
            }

            treeherder_data['job'] = job

            if project not in th_collections:
                th_collections[project] = TreeherderJobCollection()

            # get treeherder job instance and add the job instance
            # to the collection instance
            th_job = th_collections[project].get_job(treeherder_data)
            th_collections[project].add(th_job)

        if missing_resultsets:
            common.fetch_missing_resultsets("builds4h", missing_resultsets,
                                            logger)

        return th_collections
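
A hedged sketch of the builds4h input shape that transform() above reads; every value is a hypothetical placeholder, and self.find_job_guid() may require additional fields not shown here:

data = {
    'builds': [{
        'request_ids': [1234],
        'requesttime': 1400000000,   # becomes submit_timestamp
        'starttime': 1400000100,     # becomes start_timestamp
        'endtime': 1400000900,       # becomes end_timestamp
        'result': 0,                 # key into buildbot.RESULT_DICT
        'reason': 'scheduler',
        'properties': {
            'branch': 'mozilla-central',     # must match a known project
            'revision': '40a6aaa3b57c89ab',  # truncated to 12 characters
            'buildername': 'b2g_emulator mozilla-central opt test mochitest-1',
            'log_url': 'https://example.com/build.log',
        },
    }]
}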
Example n. 57
    def transform(self, data):
        """
        Transform the buildapi structure into something we can ingest via
        our RESTful API.
        """
        projects = set(x.project for x in Datasource.objects.cached())
        revision_dict = defaultdict(list)
        missing_resultsets = defaultdict(set)

        # loop to catch all the revisions
        for project, revisions in data['running'].items():
            # this skips those projects we don't care about
            if project not in projects:
                continue

            for rev, jobs in revisions.items():
                revision_dict[project].append(rev)

        # retrieving the revision->resultset lookups
        revisions_lookup = common.lookup_revisions(revision_dict)

        th_collections = {}

        for project, revisions in data['running'].items():

            for revision, jobs in revisions.items():

                try:
                    resultset = common.get_resultset(project, revisions_lookup,
                                                     revision,
                                                     missing_resultsets,
                                                     logger)
                except KeyError:
                    # skip this job, at least at this point
                    continue

                # use the project and revision from the revision lookups
                # to filter out jobs with unmatched revisions
                for running_job in jobs:
                    treeherder_data = {
                        'revision_hash': resultset['revision_hash'],
                        'resultset_id': resultset['id'],
                        'project': project,
                    }

                    platform_info = buildbot.extract_platform_info(
                        running_job['buildername'])
                    job_name_info = buildbot.extract_name_info(
                        running_job['buildername'])
                    device_name = buildbot.get_device_or_unknown(
                        job_name_info.get('name', ''), platform_info['vm'])

                    new_job = {
                        'job_guid': common.generate_job_guid(
                            running_job['request_ids'][0],
                            running_job['submitted_at']),
                        'name': job_name_info.get('name', ''),
                        'job_symbol': job_name_info.get('job_symbol', ''),
                        'group_name': job_name_info.get('group_name', ''),
                        'group_symbol': job_name_info.get('group_symbol', ''),
                        'reference_data_name': running_job['buildername'],
                        'state': 'running',
                        'submit_timestamp': running_job['submitted_at'],
                        'start_timestamp': running_job['start_time'],
                        'build_platform': {
                            'os_name': platform_info['os'],
                            'platform': platform_info['os_platform'],
                            'architecture': platform_info['arch'],
                            'vm': platform_info['vm']
                        },
                        # where are we going to get this data from?
                        'machine_platform': {
                            'os_name': platform_info['os'],
                            'platform': platform_info['os_platform'],
                            'architecture': platform_info['arch'],
                            'vm': platform_info['vm']
                        },
                        'device_name': device_name,
                        'who': 'unknown',
                        'option_collection': {
                            # build_type contains an option name, e.g. PGO
                            buildbot.extract_build_type(
                                running_job['buildername']): True
                        },
                        'log_references': [],
                        'artifacts': [
                            {
                                'type': 'json',
                                'name': 'buildapi_running',
                                'log_urls': [],
                                'blob': running_job
                            },
                            {
                                'type': 'json',
                                'name': 'buildapi',
                                'log_urls': [],
                                'blob': {
                                    'buildername': running_job['buildername'],
                                    'request_id': max(running_job['request_ids'])
                                }
                            },
                        ]
                    }

                    treeherder_data['job'] = new_job

                    if project not in th_collections:
                        th_collections[project] = TreeherderJobCollection(
                            job_type='update')

                    # get treeherder job instance and add the job instance
                    # to the collection instance
                    th_job = th_collections[project].get_job(treeherder_data)
                    th_collections[project].add(th_job)

        if missing_resultsets:
            common.fetch_missing_resultsets("running", missing_resultsets,
                                            logger)

        return th_collections
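
Similarly, a hedged sketch of the 'running' buildapi payload shape this transform() expects; all values are hypothetical placeholders:

data = {
    'running': {
        'mozilla-central': {     # keyed by project
            '40a6aaa3b57c': [{   # revision -> list of running jobs
                'request_ids': [1234, 1235],
                'submitted_at': 1400000000,  # becomes submit_timestamp
                'start_time': 1400000100,    # becomes start_timestamp
                'buildername': 'Android 4.0 mozilla-central opt test mochitest-1',
            }],
        },
    },
}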
Example n. 58
    def submit_running(self, jobs):
        """Submit jobs running notifications to Treeherder
        :param jobs: Lists of jobs to be reported. (TestJob)
        """
        self.logger.debug(type(self).__name__ +
                          '.submit_running: jobs =\n%s' % jobs)
        if not self.url or not jobs:
            self.logger.debug(type(self).__name__ +
                              '.submit_running: no url/job')
            return

        tjc = TreeherderJobCollection()

        for j in jobs:
            project = j.build['repo']
            revision = j.build['revision']
            revision_hash = self.request_revision_hash(project, revision)
            if not revision_hash:
                self.logger.debug(type(self).__name__ +
                                  '.submit_running: no revision hash')
                return
            self.logger.debug(type(self).__name__ + '.submit_running: '
                              'for %s %s' % (j.name, project))

            if not j.start_timestamp:
                j.start_timestamp = timestamp_now()
            if not j.submit_timestamp:
                # If a 'pending' submission was never made for this job,
                # the submit_timestamp may be blank.
                j.submit_timestamp = timestamp_now()

            tj = tjc.get_job()
            tj.add_description(j.description)
            tj.add_reason(j.reason)
            tj.add_revision_hash(revision_hash)
            tj.add_project(project)
            tj.add_who(j.who)
            tj.add_job_guid(j.job_guid)
            tj.add_job_name(j.job_name)
            tj.add_job_symbol(j.job_symbol)
            tj.add_group_name(j.group_name)
            tj.add_group_symbol(j.group_symbol)
            tj.add_product_name(j.build['product'])
            tj.add_state(JobState.RUNNING)
            tj.add_submit_timestamp(j.submit_timestamp)
            tj.add_start_timestamp(j.start_timestamp)
            # XXX need to send these until Bug 1066346 fixed.
            tj.add_end_timestamp(j.start_timestamp)
            #
            tj.add_machine(j.machine['host'])
            tj.add_build_info(j.build['os_name'],
                              j.build['platform'],
                              j.build['architecture'])
            tj.add_machine_info(j.machine['os_name'],
                                j.machine['platform'],
                                j.machine['architecture'])
            tj.add_option_collection({'opt': True})

            tjc.add(tj)
        self.post_request(project, tjc, j.job_guid)
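
The timestamp_now() helper is not shown in these examples; a plausible implementation, assuming Treeherder expects integer epoch seconds as the int(time.time()) calls elsewhere in this document suggest:

import time

def timestamp_now():
    # Assumption: integer epoch seconds, matching the int(time.time())
    # usage in the other submit examples.
    return int(time.time())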