예제 #1
0
    def run(self, client_id, crash_keys, publish_to_client=False):
        """Reruns analysis for a batch of crashes.

    Args:
      client_id (CrashClient): The client whose crash we should iterate.
      crash_keys (list): A list of urlsafe encodings of crash keys.
      publish_to_client (bool): If True, also publish each analysis result
        to the client once its analysis pipeline completes.
    """
        # Build a fresh Predator instance so the rerun picks up the current
        # CrashConfig and a new gitiles repository factory.
        client = PredatorForClientID(
            client_id, CachedGitilesRepository.Factory(HttpClientAppengine()),
            CrashConfig.Get())

        updated = []
        for key in crash_keys:
            key = ndb.Key(urlsafe=key)
            crash = key.get()
            crash.ReInitialize(client)
            updated.append(crash)

        # Persist all re-initialized crash entities in a single batch write.
        ndb.put_multi(updated)

        for crash in updated:
            logging.info('Initialize analysis for crash %s', crash.identifiers)
            if publish_to_client:
                run_analysis = yield CrashAnalysisPipeline(
                    client_id, crash.identifiers)
                # Publishing must wait until the analysis pipeline finishes.
                with pipeline.After(run_analysis):
                    yield PublishResultPipeline(client_id, crash.identifiers)
            else:
                yield CrashAnalysisPipeline(client_id, crash.identifiers)
예제 #2
0
 def __init__(self, client_id, crash_identifiers):
   """Remembers the crash identifiers and builds Findit for this client."""
   super(CrashBasePipeline, self).__init__(client_id, crash_identifiers)
   self._crash_identifiers = crash_identifiers
   repo_factory = CachedGitilesRepository.Factory(HttpClientAppengine())
   self._findit = FinditForClientID(client_id, repo_factory, CrashConfig.Get())
예제 #3
0
    def run(self, raw_crash_data):
        """Fire off pipelines to run the analysis and publish its results.

    N.B., due to the structure of AppEngine pipelines, this method must
    accept the same arguments as are passed to ``__init__``.

    Args:
      raw_crash_data (dict): Raw crash data from the client; parsed into
        structured crash data by the Predator client below.
    """
        predator_client = PredatorForClientID(
            self._client_id,
            CachedGitilesRepository.Factory(HttpClientAppengine()),
            CrashConfig.Get())
        crash_data = predator_client.GetCrashData(raw_crash_data)

        need_analysis = predator_client.NeedsNewAnalysis(crash_data)
        if need_analysis:
            logging.info('New %s analysis is scheduled for %s',
                         self._client_id, crash_data.identifiers)

            UpdateCrashAnalysisData(crash_data, predator_client)
            run_analysis = yield CrashAnalysisPipeline(self._client_id,
                                                       crash_data.identifiers)
            # Publish only after the analysis pipeline has completed.
            with pipeline.After(run_analysis):
                yield PublishResultPipeline(self._client_id,
                                            crash_data.identifiers)
        else:
            # No new analysis needed; publish the existing result directly.
            yield PublishResultPipeline(self._client_id,
                                        crash_data.identifiers)
예제 #4
0
 def __init__(self, client_id, crash_identifiers):
     """Creates the Predator client for this pipeline and wires up its log."""
     super(CrashBasePipeline, self).__init__(client_id, crash_identifiers)
     self._crash_identifiers = crash_identifiers
     repo_factory = CachedGitilesRepository.Factory(HttpClientAppengine())
     self._predator = PredatorForClientID(
         client_id, repo_factory, CrashConfig.Get())
     self._predator.SetLog(self.log)
예제 #5
0
def _RetrieveManifest(repo_url, revision, os_platform):  # pragma: no cover.
  """Returns the manifest of all the dependencies for the given revision.

  Args:
    repo_url (str): The url to the Gitiles project of the root repository.
    revision (str): The revision of the root repository.
    os_platform (str): The platform of the code checkout.

  Returns:
    A list of DependencyRepository instances ordered reversely by the relative
    path of each dependency checkout in the checkout of the root repository.
    The longer the relative path, the smaller index in the returned list.

    The reverse order is to make it easy to reliably determine which dependency
    a file is from, when given a file path relative to the root repository.
  """
  root_dir = 'src/'
  manifest = []

  def AddEntry(path, url, revision):  # pragma: no cover.
    # Normalize the checkout path into the '//path/to/dep/' form.
    if path.startswith(root_dir):
      path = path[len(root_dir):]
    assert not path.startswith('//')
    path = '//' + path
    if not path.endswith('/'):
      path += '/'

    # urlparse splits e.g. "https://chromium.google.com/chromium/src.git" into
    # netloc='chromium.google.com' and path='/chromium/src.git'.
    parsed = urlparse.urlparse(url)
    assert parsed.path, 'No project extracted from %s' % url

    manifest.append(
        DependencyRepository(
            path=path,
            server_host=parsed.netloc,
            project=parsed.path[1:],  # Strip the leading '/'.
            revision=revision))

  # The root repository is itself part of the manifest.
  AddEntry('src/', repo_url, revision)

  # Add all the dependent repositories.
  # DEPS fetcher now assumes chromium/src and master branch.
  dep_fetcher = chrome_dependency_fetcher.ChromeDependencyFetcher(
      CachedGitilesRepository.Factory(FinditHttpClient()))
  deps = dep_fetcher.GetDependency(revision, os_platform)
  for path, dep in deps.iteritems():
    # Remove clause when crbug.com/929315 gets fixed.
    if path in _BLACKLISTED_DEPS.get(repo_url, []):
      continue
    AddEntry(path, dep.repo_url, dep.revision)

  # Longest paths first, so the most specific dependency wins file lookups.
  manifest.sort(key=lambda entry: len(entry.path), reverse=True)
  return manifest
예제 #6
0
def _GetDependencies(chromium_revision, os_platform):
    """Returns the dependencies used by the specified chromium revision."""
    dep_fetcher = chrome_dependency_fetcher.ChromeDependencyFetcher(
        CachedGitilesRepository.Factory(HttpClientAppengine()))
    dependencies = dep_fetcher.GetDependency(chromium_revision, os_platform)
    # Map each checkout path to the repo url and pinned revision of its dep.
    return {
        path: {
            'repo_url': dependency.repo_url,
            'revision': dependency.revision,
        }
        for path, dependency in dependencies.iteritems()
    }
예제 #7
0
  def HandleGet(self):
    """Refreshes the repo_to_dep_path mapping in config from the latest DEPS."""
    fetcher = ChromeDependencyFetcher(
        CachedGitilesRepository.Factory(HttpClientAppengine()))

    repo_to_dep_path = GetRepoToDepPath(fetcher)
    if not repo_to_dep_path:  # pragma: no cover.
      return self.CreateError('Fail to update repo_to_dep_path config.', 400)

    # Record the refreshed mapping under the service account's identity.
    account = users.User(app_identity.get_service_account_name())
    CrashConfig.Get().Update(account, True, repo_to_dep_path=repo_to_dep_path)
예제 #8
0
def ExtractDepsInfo(failure_info, change_logs):
    """Collects the DEPS used by the build and any DEPS rolls in its CLs.

  Args:
    failure_info (BaseFailureInfo): Information about all build failures.
    change_logs (dict): Result of PullChangeLogs().

  Returns:
    A dict with the following form:
    {
      'deps': {
        'path/to/dependency': {
          'revision': 'git_hash',
          'repo_url': 'https://url/to/dependency/repo.git',
        },
        ...
      },
      'deps_rolls': {
        'git_revision': [
          {
            'path': 'src/path/to/dependency',
            'repo_url': 'https://url/to/dependency/repo.git',
            'new_revision': 'git_hash1',
            'old_revision': 'git_hash2',
          },
          ...
        ],
        ...
      }
    }
  """
    os_platform = GetOSPlatformName(failure_info.master_name,
                                    failure_info.builder_name)
    revision = failure_info.chromium_revision

    dep_fetcher = chrome_dependency_fetcher.ChromeDependencyFetcher(
        CachedGitilesRepository.Factory(FinditHttpClient()))

    deps = GetDependencies(revision, os_platform, dep_fetcher)
    deps_rolls = DetectDependencyRolls(change_logs, os_platform, dep_fetcher)
    return {'deps': deps, 'deps_rolls': deps_rolls}
예제 #9
0
def NeedNewAnalysis(json_crash_data):
  """Checks if an analysis is needed for this crash.

  Args:
    json_crash_data (dict): Crash information from clients.

  Returns:
    True if a new analysis is needed; False otherwise.
  """
  if json_crash_data.get('redo'):
    # A forced redo always triggers a fresh analysis.
    logging.info('Force redo crash %s',
                 repr(json_crash_data['crash_identifiers']))
    return True

  # N.B., must call FinditForClientID indirectly, for mock testing.
  client = crash_pipeline.FinditForClientID(
      json_crash_data['client_id'],
      CachedGitilesRepository.Factory(HttpClientAppengine()), CrashConfig.Get())

  # Detect the regression range, and decide if we actually need to
  # run a new analysis or not.
  return client.NeedsNewAnalysis(client.GetCrashData(json_crash_data))
예제 #10
0
def _DetectDependencyRolls(change_logs, os_platform):
    """Detect DEPS rolls in the given CL change logs.

  Args:
    change_logs (dict): Output of pipeline PullChangelogPipeline.run().

  Returns:
    A dict in the following form:
    {
      'git_revision': [
        {
          'path': 'src/path/to/dependency/',
          'repo_url': 'https://url/to/dependency/repo.git',
          'new_revision': 'git_hash1',
          'old_revision': 'git_hash2',
        },
        ...
      ],
      ...
    }
  """
    dep_fetcher = chrome_dependency_fetcher.ChromeDependencyFetcher(
        CachedGitilesRepository.Factory(HttpClientAppengine()))

    deps_rolls = {}
    for revision, change_log in change_logs.iteritems():
        # Check DEPS roll only if the chromium DEPS file is changed by the CL.
        touched_deps = any(touched_file['new_path'] == 'DEPS'
                           for touched_file in change_log['touched_files'])
        if not touched_deps:
            continue
        # In git, r^ refers to the previous revision of r.
        rolls = dep_fetcher.GetDependencyRolls('%s^' % revision, revision,
                                               os_platform)
        deps_rolls[revision] = [roll.ToDict() for roll in rolls]

    return deps_rolls
예제 #11
0
import logging
import mock
from testing_utils import testing

from common.findit_http_client import FinditHttpClient
from gae_libs.gitiles.cached_gitiles_repository import CachedGitilesRepository
from libs.deps import chrome_dependency_fetcher
from libs.deps.dependency import Dependency
from libs.gitiles.diff import ChangeType
from services import deps
from services.parameters import TestFailureInfo
from services.test.build_failure_analysis_test import ChangeLogFromDict

# Shared dependency fetcher for all tests in this module; resolves DEPS via
# the cached Gitiles repository wrapper.
_DEP_FETCHER = chrome_dependency_fetcher.ChromeDependencyFetcher(
    CachedGitilesRepository.Factory(FinditHttpClient()))


class DepsTest(testing.AppengineTestCase):
    def testGetOSPlatformName(self):
        """An 'android' builder maps to the 'android' platform name."""
        self.assertEqual(
            'android', deps.GetOSPlatformName('chromium.linux', 'android'))

    def testGetOSPlatformNameDefault(self):
        """A builder name with no recognized platform falls back to 'unix'."""
        self.assertEqual(
            'unix', deps.GetOSPlatformName('chromium.linux', 'linux'))