def submit(self,
           revision,
           browser,
           timestamp,
           perf_data,
           link='',
           version='',
           repo_link='',
           video_links='',
           extra_info_obj=None):
    """Build a job dataset for the given run and post it to Treeherder.

    :param revision: Changeset the results belong to.
    :param browser: Browser the performance data was collected with.
    :param timestamp: Timestamp of the run.
    :param perf_data: Performance data payload for the job.
    :param link: Optional link attached to the job.
    :param version: Optional version string.
    :param repo_link: Optional link to the repository.
    :param video_links: Optional links to recorded videos.
    :param extra_info_obj: Optional dict of extra job information.
    """
    # None instead of a mutable {} default so one dict instance is not
    # shared (and potentially mutated) across calls.
    if extra_info_obj is None:
        extra_info_obj = {}

    j_dataset = self.create_job_dataset(revision=revision,
                                        browser=browser,
                                        timestamp=timestamp,
                                        perf_data=perf_data,
                                        link=link,
                                        version=version,
                                        repo_link=repo_link,
                                        video_links=video_links,
                                        extra_info_obj=extra_info_obj)
    tjc = self.create_job_collection(j_dataset)

    # Only pass server_url when one was configured; otherwise let the
    # client fall back to its built-in default server.
    if self.server_url:
        client = TreeherderClient(server_url=self.server_url,
                                  client_id=self.client_id,
                                  secret=self.secret)
    else:
        client = TreeherderClient(client_id=self.client_id,
                                  secret=self.secret)

    client.post_collection(self.repo, tjc)
    def submit(self,
               revision,
               browser,
               timestamp,
               perf_data,
               version='',
               repo_link='',
               video_links='',
               extra_info_obj={}):

        j_dataset = self.create_job_dataset(revision=revision,
                                            browser=browser,
                                            timestamp=timestamp,
                                            perf_data=perf_data,
                                            version=version,
                                            repo_link=repo_link,
                                            video_links=video_links,
                                            extra_info_obj=extra_info_obj)
        tjc = self.create_job_collection(j_dataset)

        if self.server_url:
            client = TreeherderClient(server_url=self.server_url,
                                      client_id=self.client_id,
                                      secret=self.secret)
        else:
            client = TreeherderClient(client_id=self.client_id,
                                      secret=self.secret)

        try:
            return_result = client.post_collection(self.repo, tjc)
        except Exception as e:
            print e.message
            print traceback.print_exc()
            return None
        return return_result
def main(args):
    """Compare recent pushes of each project between a Treeherder instance
    and production, logging any differences found by DeepDiff."""
    compare_to_client = TreeherderClient(server_url=HOSTS[args.host])
    production_client = TreeherderClient(server_url=HOSTS["production"])

    # Support comma separated projects
    projects = args.projects.split(',')
    for _project in projects:
        logger.info("Comparing {} against production.".format(_project))
        pushes = _normalized_pushes(compare_to_client, _project)
        production_pushes = _normalized_pushes(production_client, _project)

        # Fix: sorted() returns a new list, so the original only sorted the
        # iteration used for field deletion and still compared both lists in
        # raw API order. Sorting the lists themselves (in the helper) aligns
        # the indexes; the assert still guards against mismatched push sets.
        for push, production_push in zip(pushes, production_pushes):
            assert push["revision"] == production_push["revision"]
            difference = DeepDiff(push, production_push)
            if difference:
                logger.info(difference.to_json())
                logger.info("{}/#/jobs?repo={}&revision={}".format(
                            compare_to_client.server_url,
                            _project,
                            push["revision"]))
                logger.info("{}/#/jobs?repo={}&revision={}".format(
                            production_client.server_url,
                            _project,
                            production_push["revision"]))


def _normalized_pushes(client, project, count=50):
    """Fetch *count* pushes for *project*, sorted by revision, with fields
    that legitimately differ between instances removed."""
    pushes = sorted(client.get_pushes(project, count=count),
                    key=lambda push: push["revision"])
    # Remove properties that are irrelevant for the comparison
    for _push in pushes:
        del _push["id"]
        for _rev in _push["revisions"]:
            del _rev["result_set_id"]
    return pushes
Beispiel #4
0
    def post_request(self, project, job_collection, guid=None):
        """Post *job_collection* to Treeherder, retrying on timeouts.

        :param project: Treeherder project (repository) to post to.
        :param job_collection: TreeherderJobCollection to submit.
        :param guid: Optional job guid; when given, the job URL is logged.
        """
        cls_name = type(self).__name__
        self.logger.debug(cls_name + '.post_request - '
                          'job_collection =\n%s' %
                          pretty(job_collection.get_collection_data()))

        client = TreeherderClient(protocol=self.protocol,
                                  host=self.server,
                                  client_id=self.credentials['client_id'],
                                  secret=self.credentials['secret'])

        attempt = 0
        while attempt < self.retries:
            attempt += 1
            try:
                client.post_collection(project, job_collection)
                self.logger.info(cls_name +
                                 '.post_request - collection posted')
                if guid:
                    job_url = self.request_job_url(project, guid)
                    self.logger.info(cls_name +
                                     '.post_request - url is %s' % job_url)
                return
            except requests.exceptions.Timeout:
                message = ('Attempt %d to post result to '
                           'Treeherder timed out.' % attempt)
                self.logger.error(message)
                time.sleep(self.retry_wait)
            except Exception as e:
                message = ('Error submitting request to Treeherder\n\n'
                           'Exception: %s\n'
                           'TreeherderJobCollection %s\n' %
                           (e, pretty(job_collection.get_collection_data())))
                self.logger.exception(message)
                return
        self.logger.error('Error submitting request to Treeherder.')
    def submit(self, job, logs=None):
        """Send *job* (with any collected job details) to Treeherder and
        print the URL where the results can be viewed."""
        if logs is None:
            logs = []

        # Job info can only be submitted once, hence it is attached here,
        # at completion time.
        if self._job_details:
            job.add_artifact('Job Info', 'json',
                             {'job_details': self._job_details})

        collection = TreeherderJobCollection()
        collection.add(job)

        print('Sending results to Treeherder: {}'.format(
            collection.to_json()))

        parsed = urlparse(self.url)
        client = TreeherderClient(protocol=parsed.scheme,
                                  host=parsed.hostname,
                                  client_id=self.client_id,
                                  secret=self.secret)
        client.post_collection(self.repository, collection)

        result_url = urljoin(
            self.url,
            JOB_FRAGMENT.format(repository=self.repository,
                                revision=self.revision))
        print('Results are available to view at: {}'.format(result_url))
Beispiel #6
0
    def __init__(self,
                 worker_subprocess,
                 options,
                 jobs,
                 s3_bucket=None,
                 mailer=None,
                 shared_lock=None):
        """Set up Treeherder submission state for a worker subprocess.

        When no Treeherder URL is configured, initialization stops early
        and no client is created.
        """
        assert options, "options is required."
        assert shared_lock, "shared_lock is required."

        self.worker = worker_subprocess
        self.options = options
        self.jobs = jobs
        self.s3_bucket = s3_bucket
        self.mailer = mailer
        self.shared_lock = shared_lock
        self.shutdown_requested = False
        LOGGER.debug('AutophoneTreeherder')

        self.url = self.options.treeherder_url
        if not self.url:
            # Nothing to talk to; leave the client unconfigured.
            LOGGER.debug('AutophoneTreeherder: no treeherder url')
            return

        self.client_id = self.options.treeherder_client_id
        self.secret = self.options.treeherder_secret
        self.retry_wait = self.options.treeherder_retry_wait

        self.client = TreeherderClient(server_url=self.url,
                                       client_id=self.client_id,
                                       secret=self.secret)

        LOGGER.debug('AutophoneTreeherder: %s', self)
    def __init__(self,
                 worker_subprocess,
                 options,
                 jobs,
                 s3_bucket=None,
                 mailer=None):
        """Set up Treeherder submission state for a worker subprocess.

        When no Treeherder URL is configured, initialization stops early
        and no client is created.
        """
        assert options, "options is required."

        log = utils.getLogger()

        self.worker = worker_subprocess
        self.options = options
        self.jobs = jobs
        self.s3_bucket = s3_bucket
        self.mailer = mailer
        self.shutdown_requested = False
        log.debug('AutophoneTreeherder')

        self.url = self.options.treeherder_url
        if not self.url:
            # Nothing to talk to; leave the client unconfigured.
            log.debug('AutophoneTreeherder: no treeherder url')
            return

        self.client_id = self.options.treeherder_client_id
        self.secret = self.options.treeherder_secret
        self.retry_wait = self.options.treeherder_retry_wait

        self.client = TreeherderClient(server_url=self.url,
                                       client_id=self.client_id,
                                       secret=self.secret)

        log.debug('AutophoneTreeherder: %s', self)
Beispiel #8
0
    def submit(self, job):
        """Submit the job to treeherder.

        :param job: Treeherder job instance to use for submission.

        """
        job.add_submit_timestamp(int(time.time()))

        # Job info can only be submitted once, hence it is attached here,
        # at completion time.
        if self._job_details:
            job.add_artifact('Job Info', 'json', {'job_details': self._job_details})

        collection = TreeherderJobCollection()
        collection.add(job)

        logger.info('Sending results to Treeherder: {}'.format(collection.to_json()))

        parsed = urlparse(self.url)
        client = TreeherderClient(protocol=parsed.scheme, host=parsed.hostname,
                                  client_id=self.client_id, secret=self.secret)
        client.post_collection(self.repository, collection)

        result_url = urljoin(self.url,
                             JOB_FRAGMENT.format(repository=self.repository,
                                                 revision=self.revision))
        logger.info('Results are available to view at: {}'.format(result_url))
 def __init__(self, repo, platform, status_check):
     """Remember the query target and prepare a Treeherder API client."""
     self.thclient = TreeherderClient()
     self.pushes = []
     self.repo = repo
     self.platform = platform
     # Only the 'opt' build option is considered.
     self.platform_option = 'opt'
     self.skip_status_check = status_check
Beispiel #10
0
 def __init__(self, input_env_data):
     """Normalize the environment data keys to upper case and dispatch it."""
     self.platform_option = 'opt'
     self.thclient = TreeherderClient()
     self.resultsets = []
     normalized = {}
     for key, value in input_env_data.items():
         normalized[key.upper()] = value
     self.env_data = normalized
     self.dispatch_variables(self.env_data)
Beispiel #11
0
 def __init__(self,
              server_url='https://treeherder.mozilla.org',
              treeherder_host=None):
     """Create a Treeherder API wrapper.

     ``treeherder_host`` is deprecated; prefer ``server_url``.
     """
     if treeherder_host:
         deprecation_message = (
             "The `TreeherderApi()` parameter `treeherder_host` is deprecated. "
             "Use `server_url` instead, or omit entirely to use the default of "
             "production Treeherder.")
         LOG.warning(deprecation_message)
         server_url = 'https://%s' % treeherder_host
     self.treeherder_client = TreeherderClient(server_url=server_url)
Beispiel #12
0
def get_revision_hash(server_url, project, revision):
    """Retrieve the Treeherder's revision hash for a given revision.

    :param server_url: URL of the Treeherder instance.
    :param project: The project (branch) to use.
    :param revision: The revision to get the hash for.
    :raises ValueError: If Treeherder has no result set for the revision.
    """
    client = TreeherderClient(server_url=server_url)
    resultsets = client.get_resultsets(project, revision=revision)

    # Fail with a descriptive error instead of an opaque IndexError when
    # the revision is unknown to Treeherder.
    if not resultsets:
        raise ValueError('No result set found for revision {} on project {}'
                         .format(revision, project))

    return resultsets[0]['revision_hash']
Beispiel #13
0
 def __init__(self, ldap_auth, is_triggerbot_user=lambda _: True):
     """Initialize trigger-bot bookkeeping state and its Treeherder client."""
     self.auth = ldap_auth
     self.is_triggerbot_user = is_triggerbot_user
     self.log = logging.getLogger('trigger-bot')
     self.treeherder_client = TreeherderClient()
     # Per-revision trigger bookkeeping, keyed lazily.
     self.revmap = defaultdict(dict)
     self.revmap_threshold = TreeWatcher.revmap_threshold
     self.lower_trigger_limit = TreeWatcher.default_retry * TreeWatcher.per_push_failures
     self.global_trigger_count = 0
     self.hidden_builders = set()
     self.refresh_builder_counter = 0
Beispiel #14
0
def get_job_url(task_id, run_id, **params):
    """Build a Treeherder job url for a given Taskcluster task.

    :param task_id: Taskcluster task id (slug-encoded).
    :param run_id: Run index within the task.
    :param params: Extra query-string parameters for the jobs view.
    """
    treeherder_client = TreeherderClient()
    uuid = slugid.decode(task_id)

    # Fetch specific job id from treeherder so the link pre-selects the job.
    job_details = treeherder_client.get_job_details(
        job_guid=f"{uuid}/{run_id}")
    # Idiomatic emptiness check instead of len(...) > 0.
    if job_details:
        params["selectedJob"] = job_details[0]["job_id"]

    return f"https://treeherder.mozilla.org/#/jobs?{urlencode(params)}"
    def __init__(self, application, branch, platform, server_url=TREEHERDER_URL):
        """Create a new instance of the Treeherder class.

        :param application: The name of the application to download.
        :param branch: Name of the branch.
        :param platform: Platform of the application.
        :param server_url: The URL of the Treeherder instance to access.
        """
        self.logger = logging.getLogger(__name__)
        self.client = TreeherderClient(server_url=server_url)

        self.application = application
        self.branch = branch
        self.platform = platform
Beispiel #16
0
def make_task_graph(public_key, signing_pvt_key,
                    root_template="release_graph.yml.tmpl",
                    template_dir=DEFAULT_TEMPLATE_DIR,
                    **template_kwargs):
    """Render the release task-graph YAML template and return it parsed.

    :param public_key: Key file used by ``encrypt_env_var`` to encrypt
        environment variables embedded in the graph.
    :param signing_pvt_key: Path to the signing private key file; its
        contents are bound into ``sign_task`` but kept out of the graph.
    :param root_template: Name of the Jinja2 template to render.
    :param template_dir: Directory the templates are loaded from.
    :param template_kwargs: Extra template variables; must include at least
        ``branch`` and ``revision`` (used for the Treeherder lookup below).
    """
    # TODO: some validation of template_kwargs + defaults
    env = Environment(loader=FileSystemLoader(template_dir),
                      undefined=StrictUndefined,
                      extensions=['jinja2.ext.do'])
    th = TreeherderClient()

    now = arrow.now()
    # NOTE(review): relies on arrow's `timestamp` being a property (older
    # arrow releases); newer arrow makes it a method — confirm the pinned
    # arrow version before upgrading.
    now_ms = now.timestamp * 1000

    # Don't let the signing pvt key leak into the task graph.
    with open(signing_pvt_key) as f:
        pvt_key = f.read()

    template = env.get_template(root_template)
    template_vars = {
        "stableSlugId": stableSlugId(),
        "chunkify": chunkify,
        "sorted": sorted,
        "now": now,
        "now_ms": now_ms,
        # This is used in defining expirations in tasks. There's no way to
        # actually tell Taskcluster never to expire them, but 1,000 years
        # is as good as never....
        "never": arrow.now().replace(years=1000),
        # Treeherder expects 12 symbols in revision
        "revision_hash": th.get_resultsets(
            template_kwargs["branch"],
            revision=template_kwargs["revision"][:12])[0]["revision_hash"],
        "get_treeherder_platform": treeherder_platform,
        "encrypt_env_var": lambda *args: encryptEnvVar(*args,
                                                       keyFile=public_key),
        "buildbot2ftp": buildbot2ftp,
        "buildbot2bouncer": buildbot2bouncer,
        "sign_task": partial(sign_task, pvt_key=pvt_key),
    }
    # Caller-supplied kwargs take precedence over the computed defaults.
    template_vars.update(template_kwargs)

    return yaml.safe_load(template.render(**template_vars))
Beispiel #17
0
    def __init__(self,
                 application,
                 branch,
                 platform,
                 host=TREEHERDER_HOST,
                 protocol='https'):
        """Create a new instance of the Treeherder class.

        :param application: The name of the application to download.
        :param branch: Name of the branch.
        :param platform: Platform of the application.
        :param host: The Treeherder host to make use of.
        :param protocol: The protocol for the Treeherder host.
        """
        self.logger = logging.getLogger(__name__)
        self.client = TreeherderClient(host=host, protocol=protocol)

        self.application = application
        self.branch = branch
        self.platform = platform
Beispiel #18
0
def get_job_url(repository, revision, task_id=None, run_id=None, **params):
    """Build a Treeherder job url for a given Taskcluster task"""
    assert isinstance(repository, str) and repository, "Missing repository"
    assert isinstance(revision, str) and revision, "Missing revision"
    assert "repo" not in params, "repo cannot be set in params"
    assert "revision" not in params, "revision cannot be set in params"

    params["repo"] = repository
    params["revision"] = revision

    # Only resolve the Treeherder-internal job id when a task is identified.
    if task_id is not None and run_id is not None:
        client = TreeherderClient()
        uuid = slugid.decode(task_id)

        # Fetch specific job id from treeherder
        job_details = client.get_job_details(job_guid=f"{uuid}/{run_id}")
        if job_details:
            params["selectedJob"] = job_details[0]["job_id"]

    return f"{JOBS_URL}?{urlencode(params)}"
Beispiel #19
0
    def __init__(self, repository, revision, settings, treeherder_url,
                 treeherder_client_id, treeherder_secret):
        """Creates new instance of the submission class.

        :param repository: Name of the repository the build has been built from.
        :param revision: Changeset of the repository the build has been built from.
        :param settings: Settings for the Treeherder job as retrieved from the config file.
        :param treeherder_url: URL of the Treeherder instance.
        :param treeherder_client_id: The client ID necessary for the Hawk authentication.
        :param treeherder_secret: The secret key necessary for the Hawk authentication.

        """
        self.client = TreeherderClient(server_url=treeherder_url,
                                       client_id=treeherder_client_id,
                                       secret=treeherder_secret)

        self.repository = repository
        self.revision = revision
        self.settings = settings

        # Accumulates 'Job Info' entries until the job is submitted.
        self._job_details = []
Beispiel #20
0
def on_event(data, message, dry_run, treeherder_server_url, **kwargs):
    """Trigger missing or talos jobs for the push named in a Pulse event.

    Returns 0 on success; raises for unknown actions.
    """
    if ignored(data):
        return 0  # SUCCESS

    # Reset mozci caches so this event is handled with fresh data.
    buildjson.BUILDS_CACHE = {}
    query_jobs.JOBS_CACHE = {}

    repo_name = data["project"]
    action = data["action"]
    times = data["times"]
    # Pulse gives us resultset_id, we need to get revision from it.
    resultset_id = data["resultset_id"]

    client = TreeherderClient(server_url=treeherder_server_url)

    LOG.info(
        "%s action requested by %s on repo_name %s with resultset_id: %s" %
        (data['action'], data["requester"], data["project"],
         data["resultset_id"]))

    revision = client.get_resultsets(repo_name, id=resultset_id)[0]["revision"]

    if action == "trigger_missing_jobs":
        BuildAPIManager().trigger_missing_jobs_for_revision(repo_name,
                                                            revision,
                                                            dry_run=dry_run)
    elif action == "trigger_all_talos_jobs":
        trigger_all_talos_jobs(repo_name=repo_name,
                               revision=revision,
                               times=times,
                               priority=-1,
                               dry_run=dry_run)
    else:
        raise Exception(
            'We were not aware of the "{}" action. Please address the code.'.
            format(action))

    return 0  # SUCCESS
Beispiel #21
0
def query_repositories(clear_cache=False):
    """
    Return dictionary with information about the various repositories.

    The data about a repository looks like this:

    .. code-block:: python

        "ash": {
            "repo": "https://hg.mozilla.org/projects/ash",
            "graph_branches": ["Ash"],
            "repo_type": "hg"
        }

    """
    LOG.debug("Query repositories")
    global REPOSITORIES

    if clear_cache:
        REPOSITORIES = {}
    if REPOSITORIES:
        # Serve the module-level cache when it has been populated.
        return REPOSITORIES

    # Rebuild the cache from Treeherder's repository listing, keeping only
    # active repositories.
    th_client = TreeherderClient()
    REPOSITORIES = {
        th_repo['name']: {
            'repo': th_repo['url'],
            'repo_type': th_repo['dvcs_type'],
            'graph_branches': [th_repo['name'].capitalize()],
        }
        for th_repo in th_client.get_repositories()
        if th_repo['active_status'] == "active"
    }

    return REPOSITORIES
def on_event(data, message, dry_run, treeherder_server_url, acknowledge,
             **kwargs):
    """Act upon Treeherder job events.

    Return if the outcome was successful or not

    :param data: Pulse event payload; must contain 'action', 'job_id',
        'project' and 'requester'.
    :param message: Pulse message; acknowledged when *acknowledge* is set.
    :param dry_run: When True, no backfill is actually requested.
    :param treeherder_server_url: Treeherder instance to query.
    :param acknowledge: Whether to ack the Pulse message.
    """
    LOG.info('Acknowledge value: {}'.format(acknowledge))

    exit_code = 0  # SUCCESS

    # Events we do not care about are acked (if requested) and succeed.
    if ignored(data):
        if acknowledge:
            # We need to ack the message to remove it from our queue
            LOG.info('Message acknowledged')
            message.ack()
        return exit_code

    # Cleaning mozci caches
    buildjson.BUILDS_CACHE = {}
    query_jobs.JOBS_CACHE = {}

    treeherder_client = TreeherderClient(server_url=treeherder_server_url)

    action = data['action'].capitalize()
    job_id = data['job_id']
    repo_name = data['project']
    status = None

    # We want to know the status of the job we're processing
    try:
        job_info = treeherder_client.get_jobs(repo_name, id=job_id)[0]
    except IndexError:
        # Unknown job id: nothing to act upon, treat as success.
        LOG.info("We could not find any job_info for repo_name: %s and "
                 "job_id: %s" % (repo_name, job_id))
        return exit_code

    buildername = job_info["ref_data_name"]

    # We want to know the revision associated for this job
    result_sets = treeherder_client.get_resultsets(
        repo_name, id=job_info["result_set_id"])
    revision = result_sets[0]["revision"]

    link_to_job = '{}/#/jobs?repo={}&revision={}&selectedJob={}'.format(
        treeherder_server_url, repo_name, revision, job_id)

    LOG.info("{} action requested by {} for '{}'".format(
        action,
        data['requester'],
        buildername,
    ))
    LOG.info('Request for {}'.format(link_to_job))

    # May return None for builder names Treeherder sends that are invalid.
    buildername = filter_invalid_builders(buildername)

    if buildername is None:
        LOG.info('Treeherder can send us invalid builder names.')
        LOG.info('See https://bugzilla.mozilla.org/show_bug.cgi?id=1242038.')
        LOG.warning('Requested job name "%s" is invalid.' %
                    job_info['ref_data_name'])
        exit_code = -1  # FAILURE

    # There are various actions that can be taken on a job, however, we currently
    # only process the backfill one
    elif action == "Backfill":
        exit_code = manual_backfill(
            revision=revision,
            buildername=buildername,
            dry_run=dry_run,
        )
        if not dry_run:
            status = 'Backfill request sent'
        else:
            status = 'Dry-run mode, nothing was backfilled.'
        LOG.debug(status)

    else:
        LOG.error('We were not aware of the "{}" action. Please file an issue'.
                  format(action))
        exit_code = -1  # FAILURE

    # Ack even on failure so the message leaves the queue.
    if acknowledge:
        # We need to ack the message to remove it from our queue
        LOG.info('Message acknowledged')
        message.ack()

    return exit_code
def process_datasets(server_url, client_id, secret, revisions, s3):
    """Post the datasets for *revisions* to Treeherder and return the result."""
    treeherder = TreeherderClient(server_url=server_url,
                                  client_id=client_id,
                                  secret=secret)
    return post_treeherder_jobs(treeherder, revisions, s3)
Beispiel #24
0
def submit(perf_data, failures, revision, summary, engine):
    """Post a result set and a perf job for *revision* to Treeherder.

    :param perf_data: Performance data blob attached as the
        'performance_data' artifact.
    :param failures: Failed testcases; currently only logged (see TODO on
        the job result below).
    :param revision: Dict describing the push; uses 'commit', 'subject'
        and the 'author' name/email/timestamp fields.
    :param summary: Raw-HTML result summary attached to the 'Job Info'
        artifact.
    :param engine: "gecko" selects the Servo-on-Gecko job/group symbols;
        anything else uses the plain Servo Perf symbols.

    Requires TREEHERDER_CLIENT_ID and TREEHERDER_CLIENT_SECRET in the
    environment.
    """

    print("[DEBUG] failures:")
    print(list(map(lambda x: x['testcase'], failures)))

    author = "{} <{}>".format(revision['author']['name'],
                              revision['author']['email'])

    dataset = [{
        # The top-most revision in the list of commits for a push.
        'revision':
        revision['commit'],
        'author':
        author,
        'push_timestamp':
        int(revision['author']['timestamp']),
        'type':
        'push',
        # a list of revisions associated with the resultset. There should
        # be at least one.
        'revisions': [{
            'comment': revision['subject'],
            'revision': revision['commit'],
            'repository': 'servo',
            'author': author
        }]
    }]

    trsc = create_resultset_collection(dataset)

    result = "success"
    # TODO: verify a failed test won't affect Perfherder visualization
    # if len(failures) > 0:
    #     result = "testfailed"

    # Random job guid with the same length as the commit hash.
    hashlen = len(revision['commit'])
    job_guid = ''.join(
        random.choice(string.ascii_letters + string.digits)
        for i in range(hashlen))

    if (engine == "gecko"):
        project = "servo"
        job_symbol = 'PLG'
        group_symbol = 'SPG'
        group_name = 'Servo Perf on Gecko'
    else:
        project = "servo"
        job_symbol = 'PL'
        group_symbol = 'SP'
        group_name = 'Servo Perf'

    dataset = [{
        'project': project,
        'revision': revision['commit'],
        'job': {
            'job_guid':
            job_guid,
            'product_name':
            project,
            'reason':
            'scheduler',
            # TODO: What is `who` for?
            'who':
            'Servo',
            'desc':
            'Servo Page Load Time Tests',
            'name':
            'Servo Page Load Time',
            # The symbol representing the job displayed in
            # treeherder.allizom.org
            'job_symbol':
            job_symbol,

            # The symbol representing the job group in
            # treeherder.allizom.org
            'group_symbol':
            group_symbol,
            'group_name':
            group_name,

            # TODO: get the real timing from the test runner
            'submit_timestamp':
            str(int(time.time())),
            'start_timestamp':
            str(int(time.time())),
            'end_timestamp':
            str(int(time.time())),
            'state':
            'completed',
            'result':
            result,  # "success" or "testfailed"
            'machine':
            'local-machine',
            # TODO: read platform from test result
            'build_platform': {
                'platform': 'linux64',
                'os_name': 'linux',
                'architecture': 'x86_64'
            },
            'machine_platform': {
                'platform': 'linux64',
                'os_name': 'linux',
                'architecture': 'x86_64'
            },
            'option_collection': {
                'opt': True
            },

            # jobs can belong to different tiers
            # setting the tier here will determine which tier the job
            # belongs to.  However, if a job is set as Tier of 1, but
            # belongs to the Tier 2 profile on the server, it will still
            # be saved as Tier 2.
            'tier':
            1,

            # the ``name`` of the log can be the default of "buildbot_text"
            # however, you can use a custom name.  See below.
            # TODO: point this to the log when we have them uploaded to S3
            'log_references': [{
                'url': 'TBD',
                'name': 'test log'
            }],
            # The artifact can contain any kind of structured data
            # associated with a test.
            'artifacts': [
                {
                    'type': 'json',
                    'name': 'performance_data',
                    # TODO: include the job_guid when the runner actually
                    # generates one
                    # 'job_guid': job_guid,
                    'blob': perf_data
                },
                {
                    'type': 'json',
                    'name': 'Job Info',
                    # 'job_guid': job_guid,
                    "blob": {
                        "job_details": [{
                            "content_type": "link",
                            "url": "https://www.github.com/servo/servo",
                            "value": "GitHub",
                            "title": "Source code"
                        }, {
                            "content_type": "raw_html",
                            "title": "Result Summary",
                            "value": summary
                        }]
                    }
                }
            ],
            # List of job guids that were coalesced to this job
            'coalesced': []
        }
    }]

    tjc = create_job_collection(dataset)

    # TODO: extract this read credential code out of this function.
    cred = {
        'client_id': os.environ['TREEHERDER_CLIENT_ID'],
        'secret': os.environ['TREEHERDER_CLIENT_SECRET']
    }

    client = TreeherderClient(server_url='https://treeherder.mozilla.org',
                              client_id=cred['client_id'],
                              secret=cred['secret'])

    # data structure validation is automatically performed here, if validation
    # fails a TreeherderClientError is raised
    client.post_collection('servo', trsc)
    client.post_collection('servo', tjc)
Beispiel #25
0
    parser = argparse.ArgumentParser(
        "Compare a push from a Treeherder instance to the production instance."
    )
    parser.add_argument(
        "--host", default="localhost", help="Host to compare. It defaults to localhost"
    )
    parser.add_argument("--revision", required=True, help="Revision to compare")
    parser.add_argument(
        "--project",
        default="mozilla-central",
        help="Project to compare. It defaults to mozilla-central",
    )

    args = parser.parse_args()

    th_instance = TreeherderClient(server_url=HOSTS[args.host])
    th_instance_pushid = th_instance.get_pushes(args.project, revision=args.revision)[0]["id"]
    th_instance_jobs = (
        th_instance.get_jobs(args.project, push_id=th_instance_pushid, count=None) or []
    )

    production = TreeherderClient(server_url=HOSTS["production"])
    production_pushid = production.get_pushes(args.project, revision=args.revision)[0]["id"]
    production_jobs = production.get_jobs(args.project, push_id=production_pushid, count=None)

    production_dict = {}
    for job in production_jobs:
        production_dict[job["job_guid"]] = job

    th_instance_dict = {}
    th_instance_not_found = []
Beispiel #26
0
    filename = cacheName('pushes-%s.json' % date)
    if os.path.exists(filename):
        with open(filename, 'r') as f:
            pushes = json.load(f)
    else:
        pushes = client.get_pushes(branch,
                                   count=1000,
                                   push_timestamp__gte=start_date,
                                   push_timestamp__lte=end_date)
        with open(filename, 'w') as f:
            json.dump(pushes, f)

    return pushes


client = TreeherderClient(server_url='https://treeherder.mozilla.org')
branch = 'autoland'

# Build the list of dates to query: 2019-01-02 .. 2019-03-31.
# %02d zero-pads single-digit days, replacing the manual "0%s" fix-ups,
# and `day` avoids shadowing the builtin `iter`.
dates = ['2019-01-%02d' % day for day in range(2, 32)]
dates += ['2019-02-%02d' % day for day in range(1, 29)]
dates += ['2019-03-%02d' % day for day in range(1, 32)]
Beispiel #27
0
import logging

from thclient import TreeherderClient

from fennec_aurora_task_creator.exceptions import NoTreeherderResultSetError, TooManyTreeherderResultSetsError

logger = logging.getLogger(__name__)

_client = TreeherderClient()


def does_job_already_exist(repository, revision, job_name, tier=1):
    """Return True when a job named *job_name* exists for the given push.

    Exactly one Treeherder result set must match *revision* on *repository*;
    otherwise NoTreeherderResultSetError (none found) or
    TooManyTreeherderResultSetsError (more than one) is raised.
    """
    result_sets = _client.get_resultsets(
        project=repository,
        revision=revision,
    )

    if not result_sets:
        raise NoTreeherderResultSetError(repository, revision)
    if len(result_sets) > 1:
        raise TooManyTreeherderResultSetsError(repository, revision)

    matching_jobs = _client.get_jobs(
        repository,
        count=2000,
        result_set_id=result_sets[0]['id'],
        tier=tier,
    )

    return _is_job_in_list(matching_jobs, job_name)


def _is_job_in_list(jobs, expected_job_name):
# Beispiel #28 (scraped-snippet separator; score: 0)
    def get_test_packages_url(self, properties):
        """Return the URL of the test packages JSON file.

        In case of localized daily builds we can query the en-US build to get
        the URL, but for candidate builds we need the tinderbox build
        of the first parent changeset which was not checked-in by the release
        automation process (necessary until bug 1242035 is not fixed).
        """
        # Fast path: the build properties may already carry the URL.
        if properties.get('test_packages_url'):
            url = properties['test_packages_url']
        else:
            # No direct URL: derive it from the en-US build's
            # test_packages.json that sits next to the build artifacts.
            overrides = {
                'locale': 'en-US',
                'extension': 'test_packages.json',
            }

            # Use Treeherder to query for the next revision which has Tinderbox builds
            # available. We can use this revision to retrieve the test-packages URL.
            if properties['tree'].startswith('release-'):
                # Map the buildbot platform name onto Treeherder job-filter
                # keys (build_platform, or build_os + build_architecture).
                platform_map = {
                    'linux': {
                        'build_platform': 'linux32'
                    },
                    'linux64': {
                        'build_platform': 'linux64'
                    },
                    'macosx': {
                        'build_os': 'mac',
                        'build_architecture': 'x86_64'
                    },
                    'macosx64': {
                        'build_os': 'mac',
                        'build_architecture': 'x86_64'
                    },
                    'win32': {
                        'build_os': 'win',
                        'build_architecture': 'x86'
                    },
                    'win64': {
                        'build_os': 'win',
                        'build_architecture': 'x86_64'
                    },
                }

                self.logger.info(
                    'Querying tinderbox revision for {} build...'.format(
                        properties['tree']))
                # Treeherder is queried with the 12-character short revision.
                revision = properties['revision'][:12]

                client = TreeherderClient(
                    server_url='https://treeherder.mozilla.org')
                # Walk back through up to 50 result sets ending at our revision.
                resultsets = client.get_resultsets(properties['branch'],
                                                   tochange=revision,
                                                   count=50)

                # Retrieve the option hashes to filter for opt builds
                # NOTE: iteritems() makes this Python-2-only code.
                option_hash = None
                for key, values in client.get_option_collection_hash(
                ).iteritems():
                    for value in values:
                        if value['name'] == 'opt':
                            option_hash = key
                            break
                    if option_hash:
                        break

                # Set filters to speed-up querying jobs
                kwargs = {
                    'job_type_name': 'Build',
                    'exclusion_profile': False,
                    'option_collection_hash': option_hash,
                    'result': 'success',
                }
                kwargs.update(platform_map[properties['platform']])

                # Pick the first (newest) result set that has a matching green
                # build; its revision becomes the tinderbox revision to fetch.
                for resultset in resultsets:
                    kwargs.update({'result_set_id': resultset['id']})
                    jobs = client.get_jobs(properties['branch'], **kwargs)
                    if len(jobs):
                        revision = resultset['revision']
                        break

                self.logger.info(
                    'Found revision for tinderbox build: {}'.format(revision))

                overrides['build_type'] = 'tinderbox'
                overrides['revision'] = revision

            # For update tests we need the test package of the target build. That allows
            # us to add fallback code in case major parts of the ui are changing in Firefox.
            if properties.get('target_buildid'):
                overrides['build_id'] = properties['target_buildid']

            # The test package json file has a prefix with bug 1239808 fixed. Older builds need
            # a fallback to a prefix-less filename.
            try:
                self.logger.info('Querying test packages URL...')
                url = self.query_file_url(properties,
                                          property_overrides=overrides)
            except download_errors.NotFoundError:
                self.logger.info(
                    'URL not found. Querying not-prefixed test packages URL...'
                )
                # Fallback: take the build's directory and append the plain
                # (un-prefixed) test_packages.json file name.
                extension = overrides.pop('extension')
                build_url = self.query_file_url(properties,
                                                property_overrides=overrides)
                url = '{}/{}'.format(build_url[:build_url.rfind('/')],
                                     extension)
                # NOTE(review): url is set to None when the fallback file is
                # also missing — callers must handle a None return.
                r = requests.head(url)
                if r.status_code != 200:
                    url = None

            self.logger.info('Found test package URL at: {}'.format(url))

        return url
# Beispiel #29 (scraped-snippet separator; score: 0)
def compare(test, buildername, revision, previous_revision):
    """Compare perf results of two revisions and return the delta in percent.

    Looks up the performance signature matching *test* and *buildername*,
    averages the data points belonging to each revision's result set, and
    returns the relative difference; 0 is returned when either revision has
    no data. The sign convention depends on whether *test* is in
    REVERSE_TESTS (see the final if/else below).
    NOTE: print statements / iteritems() make this Python-2-only code.
    """
    repo_name = query_repo_name_from_buildername(buildername)
    # Using TWO_WEEKS as interval, may change it afterwards
    signature_request_url = SIGNATURE_URL % (repo_name, TWO_WEEKS)
    signatures = fetch_json(signature_request_url)
    options_collection_hash_list = fetch_json(OPTION_COLLECTION_HASH)

    # Find the top-level (non-subtest) signature for this test/platform.
    for signature, value in signatures.iteritems():
        # Skip processing subtests. They are identified by 'test' key in the dicitonary.
        if 'test' in value:
            continue

        # Ignoring e10s here.
        # TODO: Revisit this later
        if TBPL_TESTS[test]['testname'].lower() == value['suite'].lower() and \
           TREEHERDER_PLATFORM[value["machine_platform"]] in buildername and \
           'test_options' not in value:
            test_signature = signature
        else:
            continue

        # Resolve the option-collection hash into a build type name
        # (e.g. 'opt' or 'pgo') to match against the buildername.
        hash_signature = value['option_collection_hash']
        for key in options_collection_hash_list:
            if hash_signature == key["option_collection_hash"]:
                typeOfTest = key["options"][0]["name"]
                break

        # NOTE(review): typeOfTest / test_signature stay unbound (NameError)
        # if no signature or hash ever matches — confirm inputs upstream.
        if typeOfTest == 'pgo' and typeOfTest not in buildername:
            # if pgo, it should be present in buildername
            continue
        elif typeOfTest == 'opt':
            # if opt, nothing present in buildername
            break
        else:
            # We do not run talos on any branch other than pgo and opt.
            continue

    # Using TWO_WEEKS as interval, may change it afterwards
    req = fetch_json(PERFORMANCE_DATA % (repo_name, TWO_WEEKS, test_signature))
    performance_data = req[test_signature]
    treeherder_client = TreeherderClient()
    revision_resultset_id = treeherder_client.get_resultsets(repo_name, revision=revision)[0]["id"]
    previous_revision_resultset_id = treeherder_client.get_resultsets(repo_name, revision=previous_revision)[0]["id"]
    revision_perfdata = []
    previous_revision_perfdata = []

    # Partition the signature's data points by the result set they belong to.
    for data in performance_data:
        if data["result_set_id"] == revision_resultset_id:
            revision_perfdata.append(data["value"])
        elif data["result_set_id"] == previous_revision_resultset_id:
            previous_revision_perfdata.append(data["value"])

    # Bail out with 0 (no measurable change) if either side has no data.
    if revision_perfdata and previous_revision_perfdata:
        mean_revision_perfdata = sum(revision_perfdata) / float(len(revision_perfdata))
        mean_previous_revision_perfdata = sum(previous_revision_perfdata) / float(len(previous_revision_perfdata))
    else:
        print "previous_revision_perfdata: %s" % previous_revision_perfdata
        print "revision_perfdata: %s" % revision_perfdata
        return 0

    if test in REVERSE_TESTS:
        # lower value results in regression
        return (mean_revision_perfdata - mean_previous_revision_perfdata) * 100.0 / mean_previous_revision_perfdata
    else:
        # higher value results in regression
        return (mean_previous_revision_perfdata - mean_revision_perfdata) * 100.0 / mean_previous_revision_perfdata
 def __init__(self):
     """Create the default Treeherder client used by this instance."""
     self.treeherder_client = TreeherderClient()