Code example #1
def _RetrieveTreeStatus(tree_name, start_time, end_time=None):
    """Returns a time-ascending-sorted TreeStatus list since checking point."""
    url = _MONITORED_TREES[tree_name]
    params = {
        'limit': 1000,  # 1000 is large enough to get all recent tree statuses.
        'format': 'json',
        # Tree status app treats endTime as the beginning of the time range.
        'endTime': time_util.ConvertToTimestamp(start_time),
    }
    if end_time:
        # Tree status app treats startTime as the end of the time range.
        params['startTime'] = time_util.ConvertToTimestamp(end_time)
    http_client = FinditHttpClient()
    status_code, content, _response_headers = http_client.Get(url,
                                                              params=params)
    if status_code == 200:
        all_statuses = map(_CreateTreeStatus, json.loads(content))
        all_statuses.sort(key=lambda s: s.time)
        # With 'endTime' set, the Tree status app always includes a duplicate entry
        # for the latest status.
        return all_statuses[:-1]
    else:
        logging.error('Failed to retrieve tree status for %s from %r to %r',
                      tree_name, start_time, end_time)
        return []  # Wait for next execution.
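A minimal usage sketch for the function above, assuming 'chromium' is a key in _MONITORED_TREES; the tree name and the one-hour window are placeholders, not part of the original snippet.

import logging
from datetime import datetime, timedelta

# Hypothetical call: fetch the tree statuses posted during the last hour.
start_time = datetime.utcnow() - timedelta(hours=1)
statuses = _RetrieveTreeStatus('chromium', start_time)
for status in statuses:
    # Oldest entry first, since the list is sorted ascending by time.
    logging.info('Tree status at %s: %r', status.time, status)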
Code example #2
def _CallSwarmbucketAPI(base_url, api_name, request_data, method='POST'):
    assert method in ['POST',
                      'GET'], 'Only POST and GET requests are supported'
    endpoint = '%s/%s' % (base_url, api_name)
    headers = {'Content-Type': 'application/json; charset=UTF-8'}
    if method == 'POST':
        data = json.dumps(request_data)
        status_code, content, _response_headers = FinditHttpClient().Post(
            endpoint, data, headers=headers)
    elif method == 'GET':
        status_code, content, _response_headers = FinditHttpClient().Get(
            endpoint, request_data, headers=headers)
    if status_code == 200:
        return json.loads(content)
    return {}
Code example #3
File: try_job.py Project: xinghun61/infra
def OnTryJobCompleted(params, try_job_data, build, error):
    try_job_id = params['try_job_id']
    try_job_type = params['try_job_type']

    # We want to retry 404s due to logdog's propagation delay (inherent to
    # pubsub) of up to 3 minutes.
    http_client = FinditHttpClient(interceptor=HttpClientMetricsInterceptor(
        no_retry_codes=[200, 302, 401, 403, 409, 501]))

    try:
        report = step_util.GetStepLogForLuciBuild(try_job_id, 'report',
                                                  http_client, 'report')
        if report:
            _RecordCacheStats(build, report)
    except (ValueError, TypeError) as e:
        report = {}
        logging.exception('Failed to load result report for tryjob/%s '
                          'due to exception %s.' % (try_job_id, e.message))

    error_dict, error_code = _GetError(buildbucket_build=build,
                                       buildbucket_error=error,
                                       timed_out=False,
                                       no_report=report == {})
    UpdateTryJobMetadata(try_job_data,
                         try_job_type,
                         buildbucket_build=build,
                         error_dict=error_dict,
                         error_code=error_code,
                         report=report or {})
    result_to_update = _UpdateTryJobEntity(params['urlsafe_try_job_key'],
                                           try_job_type, try_job_id, build.url,
                                           BuildbucketBuild.COMPLETED, report)
    return (result_to_update[-1],
            analysis_status.ERROR if error_dict else analysis_status.COMPLETED)
Code example #4
def _FetchPatchsetFiles(host, project, change, patchset_revision):
  """Fetches the list of files modified, added or deleted by a patchset.

  Args:
    host (str): The url of the host.
    project (str): The project name.
    change (int): The change number.
    patchset_revision (str): The commit id of the patchset.

  Returns:
    A list of dict that conforms to the FileInfo object:
    https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#file-info
  """
  # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-files
  template_to_get_change = 'https://%s/changes/%s/revisions/%s/files'
  url = template_to_get_change % (host, _GetChangeId(project, change),
                                  patchset_revision)
  status_code, response, _ = FinditHttpClient().Get(url)
  if status_code != 200:
    raise RuntimeError(
        'Failed to get change details with status code: %d, response: %s' %
        (status_code, response))

  # Remove XSSI magic prefix
  if response.startswith(')]}\''):
    response = response[4:]

  return json.loads(response)
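A hedged usage sketch; the host, project, change number, and patchset revision below are all placeholder values.

import logging

# Hypothetical call with placeholder values: list the files touched by
# patchset revision 'deadbeef' of change 12345.
files = _FetchPatchsetFiles('chromium-review.googlesource.com', 'chromium/src',
                            12345, 'deadbeef')
logging.info('Patchset touched %d files: %r', len(files), files)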
Code example #5
def PullChangeLogs(start_revision,
                   end_revision,
                   repo_url=CHROMIUM_GIT_REPOSITORY_URL,
                   ref=None,
                   **kwargs):
    """Pulls change logs for CLs between start_revision and end_revision.

  Args:
    start_revision (str): Start revision of the range, excluded.
    end_revision (str): End revision of the range, included. If end_revision is
      None, pulls all changes after start_revision.
    repo_url (str): Url of the git repo. Default to chromium repo url.
    ref (str): Name of the ref.
    kwargs(dict): Keyword arguments passed as additional params for the query.
  Returns:
    A dict with the following form:
    {
      'git_hash_revision1': common.change_log.ChangeLog.ToDict(),
      ...
    }
  """
    if not start_revision:
        return {}

    git_repo = CachedGitilesRepository(FinditHttpClient(), repo_url, ref)
    change_logs = {}

    change_log_list = git_repo.GetChangeLogs(start_revision, end_revision,
                                             **kwargs)

    for change_log in change_log_list:
        change_logs[change_log.revision] = change_log

    return change_logs
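A brief usage sketch with placeholder revisions; it relies on the default chromium repo url and on the ChangeLog objects returned by GetChangeLogs (author.email is read the same way in a later example).

import logging

# Hypothetical call: pull change logs for commits after 'rev_start' up to and
# including 'rev_end' (both revisions are placeholders).
change_logs = PullChangeLogs('rev_start', 'rev_end')
for git_hash, change_log in change_logs.iteritems():
    logging.info('%s authored by %s', git_hash, change_log.author.email)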
Code example #6
def ListTasks(experiment_id, isolate_hash):
    tag_filter = {'experiment_id': experiment_id + isolate_hash[:4]}
    return [
        i.task_id
        for i in swarming_util.ListTasks(swarming.SwarmingHost(), tag_filter,
                                         FinditHttpClient())
    ]
Code example #7
File: test_swarming.py Project: xinghun61/infra
def TriggerSwarmingTask(run_swarming_task_parameters, runner_id):
    """Triggers a swarming rerun for the given tests in a given build."""

    master_name, builder_name, build_number = (
        run_swarming_task_parameters.build_key.GetParts())
    step_name = run_swarming_task_parameters.step_name
    tests = run_swarming_task_parameters.tests

    assert tests, 'No tests to trigger swarming task for.'
    http_client = FinditHttpClient()

    # 1. Retrieve Swarming task parameters from a given Swarming task id.
    ref_task_id, ref_request = swarming.GetReferredSwarmingTaskRequestInfo(
        master_name, builder_name, build_number, step_name, http_client)

    # 2. Update/Overwrite parameters for the re-run.
    iterations_to_rerun = waterfall_config.GetSwarmingSettings().get(
        'iterations_to_rerun')
    new_request = CreateNewSwarmingTaskRequest(runner_id, ref_task_id,
                                               ref_request, master_name,
                                               builder_name, build_number,
                                               step_name, tests,
                                               iterations_to_rerun)

    # 3. Trigger a new Swarming task to re-run the failed tests.
    task_id, _ = swarming_util.TriggerSwarmingTask(swarming.SwarmingHost(),
                                                   new_request, http_client)

    if task_id:  # pragma: no branch.
        # 4. Update swarming task.
        OnSwarmingTaskTriggered(master_name, builder_name, build_number,
                                step_name, tests, task_id, iterations_to_rerun,
                                new_request)

    return task_id
Code example #8
def _FetchChangeDetails(host, project, change):
  """Fetches change detail for a given change.

  Args:
    host (str): The url of the host.
    project (str): The project name.
    change (int): The change number.

  Returns:
    A dict whose format conforms to the ChangeInfo object:
    https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-info
  """
  # Uses the Get Change API to get and parse the details of this change.
  # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#get-change.
  template_to_get_change = (
      'https://%s/changes/%s?o=ALL_REVISIONS&o=SKIP_MERGEABLE')
  url = template_to_get_change % (host, _GetChangeId(project, change))
  status_code, response, _ = FinditHttpClient().Get(url)
  if status_code != 200:
    raise RuntimeError(
        'Failed to get change details with status code: %d' % status_code)

  # Remove XSSI magic prefix
  if response.startswith(')]}\''):
    response = response[4:]

  return json.loads(response)
Code example #9
def TriggerTryJobs(try_jobs):
    """Triggers try-job in a batch.

  Args:
    try_jobs (list): a list of TryJob instances.

  Returns:
    A list of tuples (error, build) in the same order as the given try-jobs.
      error: an instance of BuildbucketError. None if no error occurred.
      build: an instance of BuildbucketBuild. None if error occurred.
  """
    json_results = []

    for try_job in try_jobs:
        status_code, content, _response_headers = FinditHttpClient().Put(
            _BUILDBUCKET_PUT_GET_ENDPOINT,
            json.dumps(try_job.ToBuildbucketRequest()),
            headers=_GetHeaders())
        if status_code == 200:  # pragma: no cover
            json_results.append(json.loads(content))
        else:
            error_content = {
                'error': {
                    'reason': status_code,
                    'message': content
                }
            }
            json_results.append(error_content)

    return _ConvertFuturesToResults(json_results)
Code example #10
def MapCommitPositionsToGitHashes(end_revision,
                                  end_commit_position,
                                  start_commit_position,
                                  repo_url=CHROMIUM_GIT_REPOSITORY_URL,
                                  ref=None):
    """Gets git_hashes of commit_positions between start_commit_position and
    end_commit_position, both ends are included.

  Args:
    end_revision (str): Revision of the end commit.
    end_commit_position (int): Commit position of the end commit.
    start_commit_position (int): Commit position of the start commit.
      It cannot be greater than end_commit_position.
    repo_url (str): Url of the git repo. Default to chromium repo url.
    ref (str): Name of the ref.

  Returns:
    dict: A map of commit_positions in range to the corresponding git_hashes.
    For example, return
    {
      1: 'rev1',
      2: 'rev2',
      3: 'rev3'
    }
    if end_commit_position is 3 and start_commit_position is 1.
  """
    assert start_commit_position <= end_commit_position, (
        'start_commit_position {} is greater than end_commit_position {}'.
        format(start_commit_position, end_commit_position))
    git_repo = CachedGitilesRepository(FinditHttpClient(), repo_url, ref)
    commit_position_range = end_commit_position - start_commit_position + 1

    logs, _ = git_repo.GetNChangeLogs(end_revision, commit_position_range)
    return dict((log.commit_position, log.revision) for log in logs)
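A brief usage sketch mirroring the docstring's own example; the revision strings and commit positions are placeholders.

# Hypothetical call: map commit positions 1..3 to git hashes, where 'rev3' is
# the revision at commit position 3 (all values are placeholders).
position_to_hash = MapCommitPositionsToGitHashes('rev3', 3, 1)
# Expected shape, per the docstring: {1: 'rev1', 2: 'rev2', 3: 'rev3'}.
git_hash_at_2 = position_to_hash.get(2)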
Code example #11
def GetTryJobs(build_ids):
    """Returns the try-job builds for the given build ids.

  Args:
    build_ids (list): a list of build ids returned by Buildbucket.

  Returns:
    A list of tuples (error, build) in the same order as the given build ids.
      error: an instance of BuildbucketError. None if no error occurred.
      build: an instance of BuildbucketBuild. None if error occurred.
  """
    json_results = []

    for build_id in build_ids:
        status_code, content, _response_headers = FinditHttpClient().Get(
            _BUILDBUCKET_PUT_GET_ENDPOINT + '/' + build_id,
            headers=_GetHeaders())
        if status_code == 200:  # pragma: no cover
            json_results.append(json.loads(content))
        else:
            error_content = {
                'error': {
                    'reason': status_code,
                    'message': content
                }
            }
            json_results.append(error_content)

    return _ConvertFuturesToResults(json_results)
Code example #12
def CountRecentCommits(repo_url,
                       ref='refs/heads/master',
                       time_period=datetime.timedelta(hours=1)):
    """Gets the number of commits that landed recently.

  By default, this function counts the commits that landed on the master ref
  during the last hour, but it can be used to count the commits landed on any
  ref over a recent period of arbitrary length.

  Args:
    repo_url (str): Url to the repo.
    ref (str): ref to count commits on.
    time_period (datetime.timedelta): window of time in which to count commits.

  Returns:
    An integer representing the number of commits that landed within the given
    time period.
  """
    count = 0
    cutoff = time_util.GetUTCNow() - time_period
    git_repo = NonCachedGitilesRepository(FinditHttpClient(), repo_url, ref)
    next_rev = ref
    while next_rev:
        # 100 is a reasonable size for a page.
        # This assumes that GetNChangeLogs returns changelogs in newer to older
        # order.
        logs, next_rev = git_repo.GetNChangeLogs(next_rev, 100)
        for log in logs:
            if log.committer.time >= cutoff:
                count += 1
            else:
                return count
    return count
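A usage sketch assuming the chromium/src Gitiles URL used elsewhere in these examples; the 24-hour window simply illustrates a non-default time_period.

import datetime

# Count commits that landed on master during the last 24 hours (the window is
# configurable; the default is one hour).
commit_count = CountRecentCommits(
    'https://chromium.googlesource.com/chromium/src',
    ref='refs/heads/master',
    time_period=datetime.timedelta(hours=24))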
Code example #13
def SearchV2BuildsOnBuilder(builder,
                            status=None,
                            build_range=None,
                            create_time_range=None,
                            page_size=None,
                            fields=None):
    """Searches builds on a builder.

  Args:
    builder (build_pb2.BuilderID): Id of the builder, with project, bucket and
      builder_name.
    status (common_pb2.Status): Status of searched builds, like
      common_pb2.FAILURE. common_pb2.ENDED_MASK can be used when searching for
      all completed builds regardless of status.
    build_range (tuple): A pair of build_ids bounding the range of builds.
    create_time_range (tuple): A pair of datetimes for the range of the build
      create_time. Both ends are optional. The format is like:
      (
         # Left bound of the range, inclusive.
         datetime(2019, 4, 8),
         # Right bound of the range, exclusive.
        datetime(2019, 4, 9)
      )
    page_size (int): Number of builds returned in one request.
    fields (google.protobuf.FieldMask): Mask for the paths to get, as not all
        fields are populated by default.
  """
    predicate = BuildPredicate(builder=builder, status=status)

    if build_range:
        if build_range[0]:  # pragma: no cover.
            # Left bound specified.
            predicate.build.start_build_id = int(build_range[0])
        if build_range[1]:
            # Right bound specified.
            predicate.build.end_build_id = int(build_range[1])

    if create_time_range:
        if create_time_range[0]:  # pragma: no cover.
            # Left bound specified.
            predicate.create_time.start_time.FromDatetime(create_time_range[0])
        if create_time_range[1]:
            # Right bound specified.
            predicate.create_time.end_time.FromDatetime(create_time_range[1])
    request = SearchBuildsRequest(predicate=predicate,
                                  page_size=page_size,
                                  fields=fields)

    status_code, content, response_headers = FinditHttpClient().Post(
        _BUILDBUCKET_V2_SEARCH_BUILDS_ENDPOINT,
        request.SerializeToString(),
        headers={'Content-Type': 'application/prpc; encoding=binary'})
    if status_code == 200 and response_headers.get(
            'X-Prpc-Grpc-Code') == GRPC_OK:
        result = SearchBuildsResponse()
        result.ParseFromString(content)
        return result
    logging.warning('Unexpected status_code: %d and prpc code: %s',
                    status_code, response_headers.get('X-Prpc-Grpc-Code'))
    return None
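A hedged usage sketch. It assumes BuilderID and common_pb2 are the buildbucket proto messages already imported in this codebase (BuilderID is constructed the same way in a later example); the project, bucket, builder name, and dates are placeholders.

import logging
from datetime import datetime

# Hypothetical query: one page of up to 100 completed builds created on
# 2019-04-08 for a placeholder builder.
builder = BuilderID(project='chromium', bucket='ci', builder='Linux Builder')
response = SearchV2BuildsOnBuilder(
    builder,
    status=common_pb2.ENDED_MASK,  # All completed builds, regardless of status.
    create_time_range=(datetime(2019, 4, 8), datetime(2019, 4, 9)),
    page_size=100)
if response:
    for build in response.builds:
        logging.info('Build %d finished with status %s', build.id, build.status)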
Code example #14
File: build_ahead.py Project: xinghun61/infra
def _TreeIsOpen():
    """Determine whether the chromium tree is currently open."""
    url = 'https://chromium-status.appspot.com/allstatus'
    params = {
        'limit': '1',
        'format': 'json',
    }
    client = FinditHttpClient()
    status_code, content, _response_headers = client.Get(url, params)

    if status_code == 200:
        try:
            states = json.loads(str(content))
            if states and states[0].get('general_state') == 'open':
                return True
        except ValueError as ve:
            logging.exception('Could not parse chromium tree status: %s', ve)
Code example #15
File: code_coverage.py Project: xinghun61/infra
def _RetrieveManifest(repo_url, revision, os_platform):  # pragma: no cover.
  """Returns the manifest of all the dependencies for the given revision.

  Args:
    repo_url (str): The url to the Gitiles project of the root repository.
    revision (str): The revision of the root repository.
    os_platform (str): The platform of the code checkout.

  Returns:
    A list of DependencyRepository instances ordered in reverse by the relative
    path of each dependency checkout within the checkout of the root repository.
    The longer the relative path, the smaller the index in the returned list.

    The reverse order makes it easy to reliably determine which dependency a
    file belongs to, given a file path relative to the root repository.
  """
  manifest = []

  root_dir = 'src/'

  def AddDependencyToManifest(path, url, revision):  # pragma: no cover.
    if path.startswith(root_dir):
      path = path[len(root_dir):]
    assert not path.startswith('//')
    path = '//' + path
    if not path.endswith('/'):
      path = path + '/'

    # Parse the url to extract the hostname and project name.
    # For "https://chromium.google.com/chromium/src.git", we get
    # ParseResult(netloc='chromium.google.com', path='/chromium/src.git', ...)
    result = urlparse.urlparse(url)
    assert result.path, 'No project extracted from %s' % url

    manifest.append(
        DependencyRepository(
            path=path,
            server_host=result.netloc,
            project=result.path[1:],  # Strip the leading '/'.
            revision=revision))

  # Add the root repository.
  AddDependencyToManifest('src/', repo_url, revision)

  # Add all the dependent repositories.
  # DEPS fetcher now assumes chromium/src and master branch.
  dep_fetcher = chrome_dependency_fetcher.ChromeDependencyFetcher(
      CachedGitilesRepository.Factory(FinditHttpClient()))
  deps = dep_fetcher.GetDependency(revision, os_platform)
  for path, dep in deps.iteritems():
    # Remove clause when crbug.com/929315 gets fixed.
    if path in _BLACKLISTED_DEPS.get(repo_url, []):
      continue
    AddDependencyToManifest(path, dep.repo_url, dep.revision)

  manifest.sort(key=lambda x: len(x.path), reverse=True)
  return manifest
Code example #16
 def RunImpl(self, parameters):
     isolate_sha = swarming.GetIsolatedShaForStep(parameters.master_name,
                                                  parameters.builder_name,
                                                  parameters.build_number,
                                                  parameters.step_name,
                                                  FinditHttpClient())
     return GetIsolateShaOutput(isolate_sha=isolate_sha,
                                build_number=parameters.build_number,
                                build_url=parameters.url,
                                try_job_url=None)
Code example #17
 def testSendRequestToServerRetryTimeout(self, mocked_post):
   mocked_post.return_value = (403, None, {})
   content, error = http_client_util.SendRequestToServer(
       'http://www.someurl.com',
       FinditHttpClient(403, None),
       post_data={
           'data': 'data'
       })
   self.assertIsNone(content)
   self.assertEqual(403, error['code'])
Code example #18
def GetGitBlame(repo_url, revision, touched_file_path, ref=None):
    """Gets git blames of touched_file.

  Args:
    repo_url (str): Url to the repo.
    revision (str): Revision for the change.
    touched_file_path (str): Full path of a file in change_log.
  """
    git_repo = CachedGitilesRepository(FinditHttpClient(), repo_url, ref)
    return git_repo.GetBlame(touched_file_path, revision)
Code example #19
def _GetChromiumWATCHLISTS():
    repo_url = 'https://chromium.googlesource.com/chromium/src'
    source = CachedGitilesRepository(FinditHttpClient(), repo_url).GetSource(
        'WATCHLISTS', 'master')
    if not source:
        return None

    # https://cs.chromium.org/chromium/src/WATCHLISTS is in python.
    definitions = ast.literal_eval(source).get('WATCHLIST_DEFINITIONS')
    return dict((k, v['filepath']) for k, v in definitions.iteritems())
Code example #20
File: flake_try_job.py Project: xinghun61/infra
def GetSwarmingTaskIdForTryJob(report, revision, step_name, test_name):
    """Check json output for each task and return id of the one with test result.

  Args:
    report (dict): A dict in the format:
        {
            'result': {
                revision: {
                    step_name: {
                        'valid': (bool),
                        'step_metadata': (dict),
                        'pass_fail_counts': {
                            test_name: {
                                'pass_count': (int),
                                'fail_count': (int),
                            }
                        }
                    }
                }
            }
        }
      revision (str): The git hash the try job ran.
      step_name (str): The name of the step the flaky test was found on.
      test_name (str): The name of the flaky test.

  Returns:
    The swarming task id (str) that the try job ran to determine the pass rate,
    or None if not found.
  """
    if not report:
        return None

    http_client = FinditHttpClient()

    step_result = report.get('result', {}).get(revision, {}).get(step_name, {})
    pass_fail_counts = step_result.get('pass_fail_counts', {}).get(test_name)
    task_ids = step_result.get('step_metadata', {}).get('swarm_task_ids', [])

    if len(task_ids) == 1:
        return task_ids[0]

    if not pass_fail_counts:  # Test doesn't exist.
        return task_ids[0] if task_ids else None

    for task_id in task_ids:
        output_json = swarmed_test_util.GetTestResultForSwarmingTask(
            task_id, http_client)
        test_results = test_results_util.GetTestResultObject(
            output_json, partial_result=True)
        if output_json and test_results and test_results.IsTestResultUseful():
            return task_id

    return None
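A hedged sketch of a call, with a report dict built to match the shape documented above; the revision, step name, test name, and task ids are placeholders.

# Hypothetical report following the documented shape (all values are
# placeholders).
report = {
    'result': {
        'rev1': {
            'browser_tests': {
                'valid': True,
                'step_metadata': {
                    'swarm_task_ids': ['task1', 'task2'],
                },
                'pass_fail_counts': {
                    'Suite.Test': {'pass_count': 18, 'fail_count': 2},
                },
            }
        }
    }
}
task_id = GetSwarmingTaskIdForTryJob(report, 'rev1', 'browser_tests',
                                     'Suite.Test')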
Code example #21
    def RunImpl(self, parameters):
        analysis = ndb.Key(urlsafe=parameters.analysis_urlsafe_key).get()
        assert analysis, 'Analysis unexpectedly missing!'

        git_repo = CachedGitilesRepository(
            FinditHttpClient(), constants.CHROMIUM_GIT_REPOSITORY_URL)
        change_log = git_repo.GetChangeLog(parameters.flakiness.revision)
        flakiness = parameters.flakiness
        data_point = data_point_util.ConvertFlakinessToDataPoint(flakiness)
        data_point.commit_timestamp = change_log.committer.time
        analysis.flakiness_verification_data_points.append(data_point)
        analysis.put()
Code example #22
def GetCommitsInfo(revisions, repo_url=CHROMIUM_GIT_REPOSITORY_URL, ref=None):
    """Gets commit_positions and review urls for revisions."""
    git_repo = CachedGitilesRepository(FinditHttpClient(), repo_url, ref)
    cls = {}
    for revision in revisions:
        cls[revision] = {'revision': revision, 'repo_name': 'chromium'}
        change_log = git_repo.GetChangeLog(revision)
        if change_log:
            cls[revision]['commit_position'] = (change_log.commit_position)
            cls[revision]['url'] = (change_log.code_review_url
                                    or change_log.commit_url)
            cls[revision]['author'] = change_log.author.email
    return cls
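A minimal usage sketch; the revision hashes are placeholders.

import logging

# Hypothetical call with placeholder revision hashes.
commits = GetCommitsInfo(['rev1', 'rev2'])
for revision, info in commits.iteritems():
    logging.info('%s is at commit position %s (%s)', revision,
                 info.get('commit_position'), info.get('url'))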
Code example #23
def _GetChromiumDirectoryToComponentMapping():
    """Returns a dict mapping from directories to components."""
    status, content, _ = FinditHttpClient().Get(_COMPONENT_MAPPING_URL)
    if status != 200:
        # None result won't be cached.
        return None
    mapping = json.loads(content).get('dir-to-component')
    if not mapping:
        return None
    result = {}
    for path, component in mapping.iteritems():
        path = path + '/' if path[-1] != '/' else path
        result[path] = component
    return result
Code example #24
def _GetChangedLinesForDependencyRepo(roll, file_path_in_log, line_numbers):
    """Gets changed line numbers for file in failure log.

    Tests if the same lines mentioned in failure log are changed within
    the DEPS roll, if so, return those line numbers.
  """
    roll_repo = CachedGitilesRepository(FinditHttpClient(), roll['repo_url'])
    old_revision = roll['old_revision']
    new_revision = roll['new_revision']
    old_change_log = roll_repo.GetChangeLog(old_revision)
    old_rev_author_time = old_change_log.author.time
    new_change_log = roll_repo.GetChangeLog(new_revision)
    new_rev_author_time = new_change_log.author.time

    file_change_type = None
    changed_line_numbers = []

    if old_rev_author_time >= new_rev_author_time:
        # If the DEPS roll is a downgrade, bail out.
        return file_change_type, changed_line_numbers

    changes_in_roll = roll_repo.GetChangeLogs(old_revision, new_revision)
    file_change_type, culprit_commit = _GetChangeTypeAndCulpritCommit(
        file_path_in_log, changes_in_roll)

    if culprit_commit is None:
        # Bail out if no commits touched the file in the log.
        return file_change_type, changed_line_numbers

    if file_change_type == ChangeType.MODIFY:
        # If the file was modified, use the blame information to determine which
        # lines were changed.
        blame = roll_repo.GetBlame(file_path_in_log, culprit_commit)

        if not blame:
            return file_change_type, changed_line_numbers

        revisions_in_roll = [change.revision for change in changes_in_roll]
        for region in blame:
            if line_numbers:
                for line_number in line_numbers:
                    if (line_number >= region.start
                            and line_number <= region.start + region.count - 1
                            and region.revision in revisions_in_roll):
                        # One line which appears in the failure log is changed within
                        # the DEPS roll.
                        changed_line_numbers.append(line_number)

    return file_change_type, changed_line_numbers
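A hedged sketch of a call; the roll dict follows the keys read by the function above, and the repo url, revisions, file path, and line numbers are placeholders.

# Hypothetical DEPS roll and failure-log data (all values are placeholders).
roll = {
    'repo_url': 'https://chromium.googlesource.com/v8/v8',
    'old_revision': 'old_rev',
    'new_revision': 'new_rev',
}
change_type, changed_lines = _GetChangedLinesForDependencyRepo(
    roll, 'src/heap/heap.cc', [102, 103])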
Code example #25
def TriggerV2Build(builder,
                   gitiles_commit,
                   properties,
                   tags=None,
                   dimensions=None):
    """Triggers a build using buildbucket v2 API.

  Args:
    builder (build_pb2.BuilderID): Information about the builder the
      build runs on.
    gitiles_commit (common_pb2.GitilesCommit): Input commit the build runs on.
    properties (dict): Input properties of the build.
    tags (list of dict): Tags for the build. In the format:
      [
        {
          'key': 'tag-key',
          'value': 'tag-value'
        },
        ...
      ]
    dimensions (list of dict): configured dimensions of the build. Format:
      [
        {
          'key': 'dimension-key',
          'value': 'dimension-value'
        },
        ...
      ]
  """
    request = ScheduleBuildRequest(builder=builder,
                                   gitiles_commit=gitiles_commit,
                                   tags=tags or [],
                                   dimensions=dimensions or [])
    request.properties.update(properties)

    status_code, content, response_headers = FinditHttpClient().Post(
        _BUILDBUCKET_V2_SCHEDULE_BUILD_ENDPOINT,
        request.SerializeToString(),
        headers={'Content-Type': 'application/prpc; encoding=binary'})

    if status_code == 200 and response_headers.get(
            'X-Prpc-Grpc-Code') == GRPC_OK:
        result = Build()
        result.ParseFromString(content)
        return result

    logging.warning('Unexpected status_code: %d and prpc code: %s',
                    status_code, response_headers.get('X-Prpc-Grpc-Code'))
    return None
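A hedged trigger sketch. BuilderID and GitilesCommit are assumed to be the buildbucket proto messages used elsewhere in this codebase, and every identifier below (project, bucket, builder, repo, ref, commit id, properties, tags) is a placeholder.

import logging

# Hypothetical call with placeholder identifiers.
builder = BuilderID(project='chromium', bucket='try', builder='linux-rel')
gitiles_commit = GitilesCommit(
    host='chromium.googlesource.com',
    project='chromium/src',
    ref='refs/heads/master',
    id='a' * 40)  # Placeholder 40-character git hash.
build = TriggerV2Build(
    builder,
    gitiles_commit,
    {'recipe': 'chromium'},  # Input properties (placeholder).
    tags=[{'key': 'purpose', 'value': 'manual-retry'}])
if build:
    logging.info('Scheduled build %d', build.id)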
Code example #26
File: build_util.py Project: xinghun61/infra
def GetLatestCommitPositionAndRevision(master_name, builder_name, target_name):
    """Gets the latest commit position and revision for a configuration.

  Args:
    master_name (str): The name of the master to query.
    builder_name (str): The name of the builder to query.
    target_name (str): The desired target name.

  Returns:
    (int, str): The latest commit position known and its corresponding revision.
  """
    latest_targets = (IsolatedTarget.FindLatestIsolateByMaster(
        master_name, builder_name, services_constants.GITILES_HOST,
        services_constants.GITILES_PROJECT, services_constants.GITILES_REF,
        target_name))

    if latest_targets:
        commit_position = latest_targets[0].commit_position
        revision = latest_targets[0].revision
        if not revision:
            # Historical data doesn't have revision.
            commit_info = crrev.RedirectByCommitPosition(
                FinditHttpClient(), commit_position)
            assert commit_info is not None, 'No info: r%d' % commit_position
            revision = commit_info['git_sha']

        return commit_position, revision

    # Fallback to buildbot for builds not yet migrated to LUCI.
    # TODO (crbug.com/804617): Remove fallback logic after migration is complete.
    luci_project, luci_bucket = buildbot.GetLuciProjectAndBucketForMaster(
        master_name)
    search_builds_response = buildbucket_client.SearchV2BuildsOnBuilder(
        BuilderID(project=luci_project,
                  bucket=luci_bucket,
                  builder=builder_name),
        page_size=1)

    if not search_builds_response:
        # Something is wrong. Calling code should be responsible for checking for
        # the return value.
        return None, None

    latest_build = search_builds_response.builds[0]
    revision = latest_build.input.gitiles_commit.id
    repo_url = git.GetRepoUrlFromV2Build(latest_build)
    return git.GetCommitPositionFromRevision(
        latest_build.input.gitiles_commit.id, repo_url=repo_url), revision
Code example #27
  def setUp(self):
    super(StepMapperTest, self).setUp()
    self.http_client = FinditHttpClient()
    self.master_name = 'tryserver.m'
    self.wf_master_name = 'm'
    self.builder_name = 'b'
    self.build_number = 123
    self.step_name = 'browser_tests on platform'
    self.build_step = BuildStep.Create(self.master_name, self.builder_name,
                                       self.build_number, self.step_name, None)
    self.build_step.put()

    self.wf_build_step = BuildStep.Create(self.wf_master_name,
                                          self.builder_name, self.build_number,
                                          self.step_name, None)
    self.wf_build_step.put()
Code example #28
    def testUpdateFirstFailureOnTestLevelFlaky(self):
        master_name = 'm'
        builder_name = 'b'
        build_number = 223
        step_name = 'abc_test'
        failed_step = {
            'current_failure': 223,
            'first_failure': 221,
            'supported': True,
            'tests': {
                'Unittest2.Subtest1': {
                    'current_failure': 223,
                    'first_failure': 223,
                    'last_pass': 223,
                    'base_test_name': 'Unittest2.Subtest1'
                }
            }
        }
        failed_step = TestFailedStep.FromSerializable(failed_step)
        step = WfStep.Create(master_name, builder_name, 222, step_name)
        step.isolated = True
        step.log_data = 'flaky'
        step.put()

        ci_test_failure._UpdateFirstFailureOnTestLevel(master_name,
                                                       builder_name,
                                                       build_number, step_name,
                                                       failed_step,
                                                       [223, 222, 221],
                                                       FinditHttpClient())

        expected_failed_step = {
            'current_failure': 223,
            'first_failure': 223,
            'last_pass': 222,
            'supported': True,
            'list_isolated_data': None,
            'tests': {
                'Unittest2.Subtest1': {
                    'current_failure': 223,
                    'first_failure': 223,
                    'last_pass': 222,
                    'base_test_name': 'Unittest2.Subtest1'
                }
            }
        }
        self.assertEqual(expected_failed_step, failed_step.ToSerializable())
Code example #29
File: logdog_util_test.py Project: xinghun61/infra
 def setUp(self):
   super(LogDogUtilTest, self).setUp()
   self.http_client = FinditHttpClient()
   self.master_name = 'tryserver.m'
   self.builder_name = 'b'
   self.build_number = 123
   self.step_name = 'browser_tests on platform'
   self.build_data = {
       'result_details_json':
           json.dumps({
               'properties': {
                   'log_location': 'logdog://h/p/path'
               }
           })
   }
   self.stdout_stream = 'stdout_stream'
   self.step_metadata_stream = 'step_metadata_stream'
Code example #30
  def GetCompileFailures(self, build, compile_steps):
    """Returns the detailed compile failures from a failed build.

    For Chromium builds, the failure details are found in the ninja_info json
    log of the failed compile step.

    Although there's usually one compile step per build, this implementation
    can potentially handle multiple.
    """
    build_info = {
        'id': build.id,
        'number': build.number,
        'commit_id': build.input.gitiles_commit.id
    }
    ninja_infos = {}
    for step in compile_steps or []:
      for log in step.logs or []:
        if log.name.lower() == 'json.output[ninja_info]':
          ninja_infos[step.name] = logdog_util.GetLogFromViewUrl(
              log.view_url, FinditHttpClient())

    result = {}
    for step_name, ninja_info in ninja_infos.iteritems():
      if isinstance(ninja_info, basestring):
        ninja_info = json.loads(ninja_info)
      for failure in ninja_info.get('failures', []):
        failed_targets = failure.get('output_nodes')
        rule = failure.get('rule')
        if failed_targets:
          logging.info('Found the following failed targets in step %s: %s',
                       step_name, ', '.join(failed_targets))
          result.setdefault(
              step_name, {
                  'failures': {},
                  'last_passed_build': None,
                  'first_failed_build': build_info,
              })
          result[step_name]['failures'][frozenset(failed_targets)] = {
              'properties': {
                  'rule': rule
              },
              'first_failed_build': build_info,
              'last_passed_build': None,
          }
    return result