Example #1
  def UpdateLatestFilesForBot(self, config, versions):
    """Update the LATEST files, for a given bot, in Google Storage.

    Args:
      config: The builder config to update.
      versions: Versions of ChromeOS to look at, sorted in descending order.
    """
    base_url = archive_lib.GetBaseUploadURI(config)
    acl = archive_lib.GetUploadACL(config)
    latest_url = None
    # gs.GSContext skips over all commands (including read-only checks)
    # when dry_run is True, so we have to create two context objects.
    # TODO(davidjames): Fix this.
    gs_ctx = gs.GSContext()
    copy_ctx = gs.GSContext(dry_run=self._dryrun)
    for version in reversed(versions):
      url = os.path.join(base_url, 'LATEST-%s' % version)
      found = gs_ctx.Exists(url, print_cmd=False)
      if not found and latest_url:
        try:
          copy_ctx.Copy(latest_url, url, version=0, acl=acl)
          logging.info('Copied %s -> %s', latest_url, url)
        except gs.GSContextPreconditionFailed:
          found = True

      if found:
        logging.info('Found %s', url)
        latest_url = url
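A minimal sketch of the precondition trick used above, assuming a hypothetical gs://my-bucket path: Copy(..., version=0) asks Google Storage to reject the write when the destination object already exists, which surfaces as gs.GSContextPreconditionFailed.

import logging

from chromite.lib import gs

ctx = gs.GSContext()
try:
  # version=0: only perform the copy if the destination does not exist yet.
  ctx.Copy('gs://my-bucket/LATEST-1.0.0', 'gs://my-bucket/LATEST-1.1.0',
           version=0)
except gs.GSContextPreconditionFailed:
  # Another writer got there first; treat the destination as found.
  logging.info('gs://my-bucket/LATEST-1.1.0 already exists.')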
Example #2
 def testInitAclFile(self):
     """Test ACL selection logic in __init__."""
     self.assertEqual(gs.GSContext().acl_file, None)
     self.assertEqual(
         gs.GSContext(acl_file=self.acl_file).acl_file, self.acl_file)
     self.assertRaises(gs.GSContextException,
                       gs.GSContext,
                       acl_file=self.bad_path)
Example #3
 def testInitBotoFileEnv(self):
     """Test boto file selection via the BOTO_CONFIG env var."""
     os.environ['BOTO_CONFIG'] = self.gsutil_bin
     self.assertEqual(gs.GSContext().boto_file, self.gsutil_bin)
     self.assertEqual(
         gs.GSContext(boto_file=self.acl_file).boto_file, self.acl_file)
     self.assertRaises(gs.GSContextException,
                       gs.GSContext,
                       boto_file=self.bad_path)
Example #4
def GetCurrentVersion(paths, platform):
  """Find the current component version by iterating gsbucket root folder.

  Args:
    paths: ([str]) a list of folder paths strings.
    platform: (str) the platform for which the component is being built

  Returns:
    str: current component version.
    str: gs path for current component version.
  """
  current_version = distutils.version.LooseVersion('0.0.0.0')
  current_version_path = None

  for version_path in paths:
    if version_path[-1] != '/':
      logger.fatal("version_path (%s) needs to end with '/'.", version_path)
      continue
    version = os.path.basename(version_path[:-1])
    if len(ParseVersion(version)) < 3:
      # Path does not contain a component version.
      continue

    v = distutils.version.LooseVersion(version)
    if v > current_version:
      # Skip the version if the path for the target platform does not exist.
      ctx = gs.GSContext()
      src = os.path.join(version_path, platform, COMPONENT_ZIP)
      if not ctx.Exists(src):
        continue

      current_version = v
      current_version_path = version_path
  return str(current_version), current_version_path
Example #5
def GetCurrentPackageVersion(current_version_path, platform):
  """Get package version of current component.

  Args:
    current_version_path: (str) path to current version component.
    platform: (str) platform name in omaha.

  Returns:
    str: package version of current component.
  """
  if current_version_path:
    ctx = gs.GSContext()
    src = os.path.join(current_version_path, platform, COMPONENT_ZIP)
    if ctx.Exists(src):
      with osutils.TempDir(prefix='component_') as tempdir:
        ctx.Copy(src, tempdir)
        cros_build_lib.run(
            ['unzip', '-o', '-d',
             tempdir, os.path.join(tempdir, COMPONENT_ZIP)],
            stdout=True, stderr=True)
        with open(os.path.join(tempdir, MANIFEST_FILE_NAME)) as f:
          manifest = json.load(f)
          if MANIFEST_PACKAGE_VERSION_FIELD in manifest:
            return manifest[MANIFEST_PACKAGE_VERSION_FIELD]
  return '0.0.0.0'
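A hedged usage sketch of the two helpers above; the bucket layout and platform name are illustrative assumptions, not values from the source.

# Hypothetical layout: gs://<bucket>/<version>/<platform>/<COMPONENT_ZIP>
paths = [
    'gs://my-component-bucket/10.0.0.1/',
    'gs://my-component-bucket/10.1.0.0/',
]
version, version_path = GetCurrentVersion(paths, 'my-platform')
package_version = GetCurrentPackageVersion(version_path, 'my-platform')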
Example #6
def ParseURL(url):
    """Parse the files specified by a URL or filename.

  If url is a gs:// URL, globbing is supported.

  Args:
    url: a string of a GS URL or a flat filename.

  Returns:
    a list of Log namedtuples.
  """
    logs = []
    if GS_RE.match(url):
        ctx = gs.GSContext()
        try:
            files = ctx.LS(url)
        except gs.GSNoSuchKey:
            files = []
        for filename in files:
            try:
                content = ctx.Cat(filename)
                logs.extend(ParseFileContents(filename, content))
            except gs.GSNoSuchKey:
                logging.warning("Couldn't find file %s for url %s.", filename,
                                url)

    else:
        with open(url) as f:
            content = f.read()
        logs.extend(ParseFileContents(url, content))
    return logs
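A short usage sketch with hypothetical inputs; since ctx.LS() expands wildcards, a gs:// URL may include a glob.

logs = ParseURL('gs://my-logs-bucket/builds/*/messages.log')
logs += ParseURL('/tmp/messages.log')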
Example #7
    def testCrosVersionToChromeCommitFail(self):
        """Tests failure case of CrosVersionToChromeCommit()."""
        metadata_url = (
            'gs://chromeos-image-archive/%s-release/%s/partial-metadata.json' %
            (self.BOARD, self.GOOD_CROS_VERSION))
        gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
        gs_mock.AddCmdResult(['cat', metadata_url], returncode=1)

        self.bisector.gs_ctx = gs.GSContext()
        self.assertIsNone(
            self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION))

        metadata_content = 'not_a_json'
        gs_mock.AddCmdResult(['cat', metadata_url], output=metadata_content)
        self.assertIsNone(
            self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION))

        metadata_content = '\n'.join([
            '{', '  "metadata-version": "2",',
            '  "toolchain-url": "2017/05/%(target)s-2017.05.25.101355.tar.xz",',
            '  "suite_scheduling": true,', '  "build_id": 1644146,',
            '  "version": {}', '}'
        ])
        gs_mock.AddCmdResult(['cat', metadata_url], output=metadata_content)
        self.assertIsNone(
            self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION))

        gs_mock.AddCmdResult(['cat', metadata_url],
                             output=self.GOOD_METADATA_CONTENT)
        git_mock = self.StartPatcher(
            git_bisector_unittest.GitMock(self.repo_dir))
        git_mock.AddRunGitResult(
            ['log', '--oneline', '-n', '2', '60.0.3112.53'], returncode=128)
        self.assertIsNone(
            self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION))
Example #8
def Copy(src_uri, dest_uri):
    """Copy one uri to another.

  Args:
    src_uri: URI to copy from.
    dest_uri: Path to copy to.

  Raises:
    NotSupportedBetweenTypes if Cmp cannot be done between the two
      URIs provided.
  """
    uri_type1 = GetUriType(src_uri)
    uri_type2 = GetUriType(dest_uri)
    uri_types = set([uri_type1, uri_type2])

    if TYPE_GS in uri_types:
        # GS only supported between other GS files or local files.
        if len(uri_types) == 1 or TYPE_LOCAL in uri_types:
            ctx = gs.GSContext()
            return ctx.Copy(src_uri, dest_uri)

    if TYPE_LOCAL in uri_types and len(uri_types) == 1:
        return filelib.Copy(src_uri, dest_uri)

    if uri_type1 in (TYPE_HTTP, TYPE_HTTPS) and uri_type2 == TYPE_LOCAL:
        # Download file from URL.
        return URLRetrieve(src_uri, dest_uri)

    raise NotSupportedBetweenTypes(uri_type1, uri_type2)
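A hedged sketch of the dispatch above (all paths hypothetical): GS-to-GS and GS-to-local copies go through gs.GSContext, purely local copies through filelib, and HTTP(S) downloads through URLRetrieve.

Copy('gs://my-bucket/a.bin', 'gs://my-bucket/b.bin')  # GS -> GS
Copy('gs://my-bucket/a.bin', '/tmp/a.bin')            # GS -> local
Copy('/tmp/a.bin', '/tmp/b.bin')                      # local -> local
Copy('https://example.com/a.bin', '/tmp/a.bin')       # HTTP(S) -> local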
Example #9
def _InstallOne(sysroot, debug, args):
    """Parallelizable wrapper for the DebugSymbolsInstaller.Install method."""
    vartree = GetVartree(sysroot)
    gs_context = gs.GSContext(boto_file=vartree.settings['BOTO_CONFIG'])
    with DebugSymbolsInstaller(vartree, gs_context, sysroot,
                               not debug) as installer:
        installer.Install(*args)
Example #10
    def __init__(self, build, work_dir=None, unique=None, ctx=None):
        """This initializer identifies the build an payload that need signatures.

    Args:
      build: An instance of gspaths.Build that defines the build.
      work_dir: A directory inside the chroot to be used for temporarily
                manipulating files. The directory should be cleaned by the
                caller. If it is not passed, a temporary directory will be
                created.
      unique: Force known 'unique' id. Mostly for unittests.
      ctx: GS Context to use for GS operations.
    """
        self._build = build
        self._ctx = ctx if ctx is not None else gs.GSContext()
        self._work_dir = work_dir or chroot_util.TempDirInChroot()

        build_signing_uri = gspaths.ChromeosReleases.BuildPayloadsSigningUri(
            self._build)

        # Uniquify the directory using our pid/thread-id. This can't collide
        # with other hosts because the build is locked to our host in
        # paygen_build.
        if unique is None:
            unique = '%d-%d' % (os.getpid(), threading.current_thread().ident)

        # This is a partial URI that is extended for a lot of other URIs we use.
        self.signing_base_dir = os.path.join(build_signing_uri, unique)

        self.archive_uri = os.path.join(self.signing_base_dir,
                                        'payload.hash.tar.bz2')
Example #11
def FindSymbolFiles(tempdir, paths):
  """Locate symbol files in |paths|

  This returns SymbolFile objects that contain file references which are valid
  after this exits. Those files may exist externally, or be created in the
  tempdir (say, when expanding tarballs). The caller must not consider
  SymbolFile's valid after tempdir is cleaned up.

  Args:
    tempdir: Path to use for temporary files.
    paths: A list of input paths to walk. Files are returned without any
      checks. Dirs are searched for files that end in ".sym". Urls are
      fetched and then processed. Tarballs are unpacked and walked.

  Yields:
    A SymbolFile for every symbol file found in paths.
  """
  cache_dir = path_util.GetCacheDir()
  common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
  tar_cache = cache.TarballCache(common_path)

  for p in paths:
    o = urllib.parse.urlparse(p)
    if o.scheme:
      # Support globs of filenames.
      ctx = gs.GSContext()
      for gspath in ctx.LS(p):
        logging.info('processing files inside %s', gspath)
        o = urllib.parse.urlparse(gspath)
        key = ('%s%s' % (o.netloc, o.path)).split('/')
        # The common cache will not be LRU, removing the need to hold a read
        # lock on the cached gsutil.
        ref = tar_cache.Lookup(key)
        try:
          ref.SetDefault(gspath)
        except cros_build_lib.RunCommandError as e:
          logging.warning('ignoring %s\n%s', gspath, e)
          continue
        for sym in FindSymbolFiles(tempdir, [ref.path]):
          yield sym

    elif os.path.isdir(p):
      for root, _, files in os.walk(p):
        for f in files:
          if f.endswith('.sym'):
            # If p is '/tmp/foo' and filename is '/tmp/foo/bar/bar.sym',
            # display_path = 'bar/bar.sym'
            filename = os.path.join(root, f)
            yield SymbolFile(display_path=filename[len(p):].lstrip('/'),
                             file_name=filename)

    elif IsTarball(p):
      logging.info('processing files inside %s', p)
      tardir = tempfile.mkdtemp(dir=tempdir)
      cache.Untar(os.path.realpath(p), tardir)
      for sym in FindSymbolFiles(tardir, [tardir]):
        yield sym

    else:
      yield SymbolFile(display_path=p, file_name=p)
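A minimal usage sketch under assumed inputs (both paths hypothetical); FindSymbolFiles is a generator, so consume it while tempdir is still alive.

from chromite.lib import osutils

with osutils.TempDir(prefix='syms') as tempdir:
  inputs = ['/tmp/breakpad-syms', 'gs://my-bucket/debug/breakpad.tar.xz']
  for sym in FindSymbolFiles(tempdir, inputs):
    print(sym.display_path)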
Example #12
def _get_metadata_dict(build):
    """
    Get a dictionary of metadata related to this failure.

    Metadata.json is created in the HWTest Archiving stage; if this file
    isn't found, the call to Cat will time out after the number of retries
    specified in the GSContext object. If metadata.json exists, we parse
    a JSON string of its contents into a dictionary, which we return.

    @param build: A string, e.g. stout32-release/R30-4433.0.0

    @returns: A dictionary with the contents of metadata.json.

    """
    if not fundamental_libs:
        return
    try:
        tempdir = autotemp.tempdir()
        gs_context = gs.GSContext(retries=_GS_RETRIES, cache_dir=tempdir.name)
        gs_cmd = '%s%s%s/metadata.json' % (_gs_file_prefix,
                                           _chromeos_image_archive, build)
        return json.loads(gs_context.Cat(gs_cmd))
    except (cros_build_lib.RunCommandError, gs.GSContextException) as e:
        logging.debug(e)
    finally:
        tempdir.clean()
Example #13
def main(argv):
  options = ParseArgs(argv)

  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError()

  if os.geteuid() != 0:
    cros_build_lib.SudoRunCommand(sys.argv)
    return

  # sysroot must have a trailing / as the tree dictionary produced by
  # create_trees is indexed with a trailing /.
  sysroot = cros_build_lib.GetSysroot(options.board) + '/'
  trees = create_trees(target_root=sysroot, config_root=sysroot)

  vartree = trees[sysroot]['vartree']

  cache_dir = os.path.join(path_util.FindCacheDir(),
                           'cros_install_debug_syms-v' + CACHE_VERSION)

  if options.clearcache:
    osutils.RmDir(cache_dir, ignore_missing=True)

  binhost_cache = None
  if options.cachebinhost:
    binhost_cache = cache.DiskCache(cache_dir)

  boto_file = vartree.settings['BOTO_CONFIG']
  if boto_file:
    os.environ['BOTO_CONFIG'] = boto_file

  gs_context = gs.GSContext()
  symbols_mapping = RemoteSymbols(vartree, binhost_cache)

  if options.all:
    to_install = vartree.dbapi.cpv_all()
  else:
    to_install = [GetMatchingCPV(p, vartree.dbapi) for p in options.packages]

  to_install = [p for p in to_install
                if ShouldGetSymbols(p, vartree.dbapi, symbols_mapping)]

  if not to_install:
    logging.info('nothing to do, exit')
    return

  with DebugSymbolsInstaller(vartree, gs_context, sysroot,
                             not options.debug) as installer:
    args = [(p, symbols_mapping[p]) for p in to_install]
    parallel.RunTasksInProcessPool(installer.Install, args,
                                   processes=options.jobs)

  logging.debug('installation done, updating packages index file')
  packages_dir = os.path.join(sysroot, 'packages')
  packages_file = os.path.join(packages_dir, 'Packages')
  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug symbols
  # in the packages dir.
  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
  with open(packages_file, 'w') as p:
    pkgindex.Write(p)
Example #14
    def PerformStage(self):
        buildroot = self._build_root
        gs_context = gs.GSContext()
        cpv = portage_util.BestVisible(constants.CHROME_CP,
                                       buildroot=buildroot)
        version_number = cpv.version

        # We need the name of one board that has been set up in this
        # builder to find the Chrome ebuild. The chrome ebuild should be
        # the same for all the boards, so just use the first one.
        # If we don't have any boards, let the called function guess.
        board = self._boards[0] if self._boards else None
        arch_profiles = {}
        for arch in afdo.AFDO_ARCH_GENERATORS:
            afdo_file = afdo.GetLatestAFDOFile(cpv, arch, buildroot,
                                               gs_context)
            if not afdo_file:
                raise afdo.MissingAFDOData(
                    'Could not find appropriate AFDO profile')
            state = 'current' if version_number in afdo_file else 'previous'
            logging.info('Found %s %s AFDO profile %s', state, arch, afdo_file)
            arch_profiles[arch] = afdo_file

        # Now update the Chrome ebuild file with the AFDO profiles we found
        # for each architecture.
        afdo.UpdateChromeEbuildAFDOFile(board, arch_profiles)
Example #15
  def PerformStage(self):
    if not config_lib.IsCanaryType(self._run.config.build_type):
      logging.info('This stage runs only in release builders.')
      return

    # Get the Android versions set by AndroidMetadataStage.
    version_dict = self._run.attrs.metadata.GetDict().get('version', {})
    android_build_branch = version_dict.get('android-branch')
    android_version = version_dict.get('android')

    # On boards not supporting Android, versions will be None.
    if not (android_build_branch and android_version):
      logging.info('Android is not enabled on this board. Skipping.')
      return

    logging.info(
        'Downloading symbols of Android %s (%s)...',
        android_version, android_build_branch)

    arch = self._run.DetermineAndroidABI(self._current_board)

    symbols_file_url = constants.ANDROID_SYMBOLS_URL_TEMPLATE % {
        'branch': android_build_branch,
        'arch': arch,
        'version': android_version}
    symbols_file = os.path.join(self.archive_path,
                                constants.ANDROID_SYMBOLS_FILE)
    gs_context = gs.GSContext()
    gs_context.Copy(symbols_file_url, symbols_file)
Example #16
    def UploadDummyArtifact(self, path, faft_hack=False):
        """Upload artifacts to the dummy build results."""
        logging.info('UploadDummyArtifact: %s', path)
        with osutils.TempDir(prefix='dummy') as tempdir:
            artifact_path = os.path.join(
                tempdir,
                '%s/%s' % (self._current_board, os.path.basename(path)))

            logging.info('Rename: %s -> %s', path, artifact_path)
            os.mkdir(os.path.join(tempdir, self._current_board))
            shutil.copyfile(path, artifact_path)

            logging.info('Main artifact from: %s', artifact_path)

            if faft_hack:
                # We put the firmware artifact in a directory named by board so that
                # immutable FAFT infrastructure can find it. We should remove this.
                self.UploadArtifact(artifact_path,
                                    archive=True,
                                    prefix=self._current_board)
            else:
                self.UploadArtifact(artifact_path, archive=True)

        gs_context = gs.GSContext(dry_run=self._run.options.debug_forced)
        for url in self.GetDummyArchiveUrls():
            logging.info('Uploading dummy artifact to %s...', url)
            with timeout_util.Timeout(20 * 60):
                logging.info('Dummy artifact from: %s', path)
                gs_context.CopyInto(path, url, parallel=True, recursive=True)
Example #17
    def _UploadStatus(self,
                      version,
                      status,
                      message=None,
                      fail_if_exists=False):
        """Upload build status to Google Storage.

    Args:
      version: Version number to use. Must be a string.
      status: Status string.
      message: Additional message explaining the status.
      fail_if_exists: If set, fail if the status already exists.
    """
        url = BuildSpecsManager._GetStatusUrl(self.build_name, version)

        # Pickle the dictionary needed to create a BuilderStatus object.
        data = cPickle.dumps(dict(status=status, message=message))

        # This HTTP header tells Google Storage to return the PreconditionFailed
        # error message if the file already exists.
        gs_version = 0 if fail_if_exists else None

        # Do the actual upload.
        ctx = gs.GSContext(dry_run=self.dry_run)
        ctx.Copy('-', url, input=data, version=gs_version)
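A minimal sketch of the same stream-upload pattern in isolation (URL hypothetical): passing '-' as the source with input= pipes the bytes through gsutil, and version=0 makes the upload fail with gs.GSContextPreconditionFailed if the object already exists.

data = cPickle.dumps(dict(status='pass', message=None))
ctx = gs.GSContext()
# Stream |data| via stdin; fail if the status file is already present.
ctx.Copy('-', 'gs://my-bucket/builder-status/pass.pickle', input=data,
         version=0)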
Example #18
 def testInitBotoFileEnvError(self):
     """Boto file through env var error."""
     self.assertEqual(gs.GSContext().boto_file, self.boto_file)
     # Check env usage next; no need to cleanup, teardown handles it,
     # and we want the env var to persist for the next part of this test.
     os.environ['BOTO_CONFIG'] = self.bad_path
     self.assertRaises(gs.GSContextException, gs.GSContext)
Example #19
def main(argv):
    parser = GetParser()
    options = parser.parse_args(argv)
    options.Freeze()

    ctx = gs.GSContext()
    ctx.GetDefaultGSUtilBin()  # To force caching of gsutil pre-forking.

    now = datetime.datetime.now()
    expired_cutoff = now - BUILD_CLEANUP
    logging.info('Cutoff: %s', expired_cutoff)

    if options.chromeos_image_archive:
        archiveExcludes = LocateChromeosImageArchiveProtectedPrefixes(ctx)
        logging.info('Excluding:%s', '\n  '.join(archiveExcludes))

        archiveCandidates = ProduceFilteredCandidates(
            ctx, 'gs://chromeos-image-archive/', archiveExcludes, 2)
        Examine(ctx, options.dry_run, expired_cutoff, archiveCandidates)

    if options.chromeos_releases:
        remote_branches = ListRemoteBranches()
        protected_branches = ProtectedBranchVersions(remote_branches)
        logging.info('Protected branch versions: %s',
                     '\n  '.join(protected_branches))

        releasesExcludes = LocateChromeosReleasesProtectedPrefixes(
            ctx, protected_branches)
        logging.info('Excluding:%s', '\n  '.join(releasesExcludes))

        releasesCandidates = ProduceFilteredCandidates(
            ctx, 'gs://chromeos-releases/', releasesExcludes, 3)
        Examine(ctx, options.dry_run, expired_cutoff, releasesCandidates)

    return
Example #20
    def PerformStage(self):
        version_info = self._run.GetVersionInfo()
        chrome_major_version = int(version_info.chrome_branch)

        # Generate these for the last few Chrome versions. The number was
        # arbitrarily selected, but we probably don't care after that point (and if
        # we do, we can just run a tryjob with a locally patched value of N).
        milestones = list(range(chrome_major_version - 2,
                                chrome_major_version))
        gs_context = gs.GSContext()

        skipped, merge_plan = afdo.GenerateReleaseProfileMergePlan(
            gs_context, milestones)
        for skip in skipped:
            logging.warning("Can't merge profile(s) for M%s at this time",
                            skip)

        if not merge_plan:
            raise ValueError('No mergeable profiles. Fail.')

        logging.info('Merge plan: %s', merge_plan)
        merge_results = afdo.ExecuteReleaseProfileMergePlan(
            gs_context, self._build_root, merge_plan)

        assert len(merge_results) == len(merge_plan), 'Missing results?'
        run_id = str(int(time.time()))
        afdo.UploadReleaseProfiles(gs_context, run_id, merge_plan,
                                   merge_results)
Example #21
  def __init__(self, channel, board, version, bucket=None, unique=None,
               ctx=None):
    """This initializer identifies the build an payload that need signatures.

    Args:
      channel: Channel of the build whose payload is being signed.
      board: Board of the build whose payload is being signed.
      version: Version of the build whose payload is being signed.
      bucket: Bucket used to reach the signer. [defaults 'chromeos-releases']
      unique: Force known 'unique' id. Mostly for unittests.
      ctx: GS Context to use for GS operations.
    """
    self.channel = channel
    self.board = board
    self.version = version
    self.bucket = bucket if bucket else gspaths.ChromeosReleases.BUCKET
    self._ctx = ctx if ctx is not None else gs.GSContext()

    build_signing_uri = gspaths.ChromeosReleases.BuildPayloadsSigningUri(
        channel,
        board,
        version,
        bucket=bucket)

    # Uniquify the directory using our pid/thread-id. This can't collide
    # with other hosts because the build is locked to our host in
    # paygen_build.
    if unique is None:
      unique = '%d-%d' % (os.getpid(), threading.current_thread().ident)

    # This is a partial URI that is extended for a lot of other URIs we use.
    self.signing_base_dir = os.path.join(build_signing_uri, unique)

    self.archive_uri = os.path.join(self.signing_base_dir,
                                    'payload.hash.tar.bz2')
Example #22
  def __init__(self, goma_log_dir, today=None, dry_run=False):
    """Initializes the uploader.

    Args:
      goma_log_dir: path to the directory containing goma's INFO log files.
      today: datetime.date instance representing today. This is for testing
        purpose, because datetime.date is unpatchable. In real use case,
        this must be None.
      dry_run: If True, no actual upload. This is for testing purpose.
    """
    self._goma_log_dir = goma_log_dir
    logging.info('Goma log directory is: %s', self._goma_log_dir)

    # Set log upload destination.
    if today is None:
      today = datetime.date.today()
    self.dest_path = '%s/%s' % (
        today.strftime('%Y/%m/%d'), cros_build_lib.GetHostName())
    self._remote_dir = 'gs://%s/%s' % (GomaLogUploader._BUCKET, self.dest_path)
    logging.info('Goma log upload destination: %s', self._remote_dir)

    # Build metadata to be annotated to log files.
    # Use OrderedDict for json output stabilization.
    builder_info = json.dumps(collections.OrderedDict([
        ('builder', os.environ.get('BUILDBOT_BUILDERNAME', '')),
        ('master', os.environ.get('BUILDBOT_MASTERNAME', '')),
        ('slave', os.environ.get('BUILDBOT_SLAVENAME', '')),
        ('clobber', bool(os.environ.get('BUILDBOT_CLOBBER'))),
        ('os', 'chromeos'),
    ]))
    logging.info('BuilderInfo: %s', builder_info)
    self._headers = ['x-goog-meta-builderinfo:' + builder_info]

    self._gs_context = gs.GSContext(dry_run=dry_run)
Example #23
    def _UpdateStreakCounter(self, final_status, counter_name, dry_run=False):
        """Update the given streak counter based on the final status of build.

    A streak counter counts the number of consecutive passes or failures of
    a particular builder. Consecutive passes are indicated by a positive value,
    consecutive failures by a negative value.

    Args:
      final_status: String indicating final status of build,
                    constants.FINAL_STATUS_PASSED indicating success.
      counter_name: Name of counter to increment, typically the name of the
                    build config.
      dry_run: Pretend to update counter only. Default: False.

    Returns:
      The new value of the streak counter.
    """
        gs_ctx = gs.GSContext(dry_run=dry_run)
        counter_url = os.path.join(site_config.params.MANIFEST_VERSIONS_GS_URL,
                                   constants.STREAK_COUNTERS, counter_name)
        gs_counter = gs.GSCounter(gs_ctx, counter_url)

        if final_status == constants.FINAL_STATUS_PASSED:
            streak_value = gs_counter.StreakIncrement()
        else:
            streak_value = gs_counter.StreakDecrement()

        return streak_value
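A hedged sketch of gs.GSCounter on its own (counter URL hypothetical): a positive value counts consecutive passes, a negative value consecutive failures.

ctx = gs.GSContext()
counter = gs.GSCounter(ctx, 'gs://my-bucket/streak-counters/my-config')
passes_in_a_row = counter.StreakIncrement()   # e.g. 3 after three passes
# On a failure we would call counter.StreakDecrement() instead.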
Example #24
  def __init__(self, options, test_results_root,
               project=constants.GCE_PROJECT,
               zone=constants.GCE_DEFAULT_ZONE,
               network=constants.GCE_DEFAULT_NETWORK,
               machine_type=constants.GCE_DEFAULT_MACHINE_TYPE,
               json_key_file=constants.GCE_JSON_KEY,
               gcs_bucket=constants.GCS_BUCKET):
    """Processes GCE-specific options."""
    super(GCEAUWorker, self).__init__(options, test_results_root)
    self.gce_context = gce.GceContext.ForServiceAccountThreadSafe(
        project, zone, json_key_file=json_key_file)
    self.json_key_file = json_key_file
    self.gscontext = gs.GSContext()
    self.network = network
    self.machine_type = machine_type
    self.gcs_bucket = gcs_bucket
    self.tarball_local = None
    self.tarball_remote = None
    self.image = None
    self.image_link = None
    # One instance per test.
    self.instances = {}

    # Background processes that delete throw-away instances.
    self.bg_delete_processes = []

    # Load test specifications from <overlay>/scripts/gce_tests.json, if any.
    self._LoadTests()
Example #25
File: afdo.py Project: msisov/chromium68
def GetAvailableKernelProfiles():
    """Get available profiles on specified gsurl.

  Returns:
    a dictionary that maps a kernel version, e.g. "4_4", to a list of
    [milestone, major, minor, timestamp] entries, e.g.
    [62, 9901, 21, 1506581147].
  """

    gs_context = gs.GSContext()
    gs_ls_url = os.path.join(KERNEL_PROFILE_URL, KERNEL_PROFILE_LS_PATTERN)
    gs_match_url = os.path.join(KERNEL_PROFILE_URL,
                                KERNEL_PROFILE_NAME_PATTERN)
    try:
        res = gs_context.List(gs_ls_url)
    except gs.GSNoSuchKey:
        logging.info('gs files not found: %s', gs_ls_url)
        return {}

    matches = filter(None, [re.match(gs_match_url, p.url) for p in res])
    versions = {}
    for m in matches:
        versions.setdefault(m.group(1), []).append(map(int, m.groups()[1:]))
    for v in versions:
        versions[v].sort()
    return versions
Example #26
def _FetchChromePackage(cache_dir, tempdir, gs_path):
    """Get the chrome prebuilt tarball from GS.

  Returns:
    Path to the fetched chrome tarball.
  """
    gs_ctx = gs.GSContext(cache_dir=cache_dir, init_boto=True)
    files = gs_ctx.LS(gs_path)
    files = [
        found for found in files
        if _UrlBaseName(found).startswith('%s-' % constants.CHROME_PN)
    ]
    if not files:
        raise Exception('No chrome package found at %s' % gs_path)
    elif len(files) > 1:
        # - Users should provide us with a direct link to either a stripped or
        #   unstripped chrome package.
        # - In the case of being provided with an archive directory, where both
        #   stripped and unstripped chrome available, use the stripped chrome
        #   package.
        # - Stripped chrome pkg is chromeos-chrome-<version>.tar.gz
        # - Unstripped chrome pkg is chromeos-chrome-<version>-unstripped.tar.gz.
        files = [f for f in files if 'unstripped' not in f]
        assert len(files) == 1
        logging.warning('Multiple chrome packages found.  Using %s', files[0])

    filename = _UrlBaseName(files[0])
    logging.info('Fetching %s...', filename)
    gs_ctx.Copy(files[0], tempdir, print_cmd=False)
    chrome_path = os.path.join(tempdir, filename)
    assert os.path.exists(chrome_path)
    return chrome_path
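A short usage sketch with hypothetical arguments; gs_path may name a single tarball or an archive directory holding both stripped and unstripped Chrome packages.

from chromite.lib import osutils

with osutils.TempDir(prefix='chrome') as tempdir:
  chrome_path = _FetchChromePackage(
      '/home/user/.cache/chrome_fetch', tempdir,
      'gs://my-archive/R99-1234.0.0')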
Example #27
def GetAvailableKernelProfiles():
    """Get available profiles on specified gsurl.

  Returns:
    a dictionary that maps a kernel version, e.g. "4_4", to a list of
    [milestone, major, minor, timestamp] entries, e.g.
    [62, 9901, 21, 1506581147].
  """

    gs_context = gs.GSContext()
    gs_ls_url = os.path.join(KERNEL_PROFILE_URL, KERNEL_PROFILE_LS_PATTERN)
    gs_match_url = os.path.join(KERNEL_PROFILE_URL,
                                KERNEL_PROFILE_NAME_PATTERN)
    try:
        res = gs_context.List(gs_ls_url)
    except gs.GSNoSuchKey:
        logging.info('gs files not found: %s', gs_ls_url)
        return {}

    all_matches = [re.match(gs_match_url, x.url) for x in res]
    matches = [x for x in all_matches if x]
    versions = {}
    for m in matches:
        versions.setdefault(m.group(1),
                            []).append([int(x) for x in m.groups()[1:]])
    for v in versions:
        # crbug.com/984153: Sort the kernel profiles only by (milestone, timestamp)
        versions[v].sort(key=lambda x: (x[0], x[3]))
    return versions
Example #28
    def __init__(self,
                 manage_builds=False,
                 board=None,
                 version=None,
                 images_dir=None,
                 log_screen=True,
                 **kwargs):
        super(XBuddy, self).__init__(**kwargs)

        if not log_screen:
            cherrypy_log_util.UpdateConfig({'log.screen': False})

        self.config = self._ReadConfig()
        self._manage_builds = manage_builds or self._ManageBuilds()
        self._board = board
        self._version = version
        self._timestamp_folder = os.path.join(self.static_dir,
                                              Timestamp.XBUDDY_TIMESTAMP_DIR)
        if images_dir:
            self.images_dir = images_dir
        else:
            self.images_dir = os.path.join(self.GetSourceRoot(),
                                           'src/build/images')

        cache_user = '******' if common_util.IsRunningOnMoblab() else None
        self._ctx = gs.GSContext(cache_user=cache_user)

        common_util.MkDirP(self._timestamp_folder)
Example #29
    def testGetHashSignatures(self):
        """Integration test that talks to the real signer with test hashes."""
        ctx = gs.GSContext()

        unique_id = '%s.%d' % (socket.gethostname(), os.getpid())
        clean_uri = ('gs://chromeos-releases/test-channel/%s/'
                     'crostools-client/**') % unique_id

        # Cleanup before we start
        ctx.Remove(clean_uri, ignore_missing=True)

        try:
            hashes = [
                '0' * 32, '1' * 32,
                ('29834370e415b3124a926c903906f18b'
                 '3d52e955147f9e6accd67e9512185a63')
            ]

            keysets = ['update_signer']

            expected_sigs_hex = ((
                'ba4c7a86b786c609bf6e4c5fb9c47525608678caa532bea8acc457aa6dd32b43'
                '5f094b331182f2e167682916990c40ff7b6b0128de3fa45ad0fd98041ec36d6f'
                '63b867bcf219804200616590a41a727c2685b48340efb4b480f1ef448fc7bc3f'
                'b1c4b53209e950ecc721b07a52a41d9c025fd25602340c93d5295211308caa29'
                'a03ed18516cf61411c508097d5b47620d643ed357b05213b2b9fa3a3f938d6c4'
                'f52b85c3f9774edc376902458344d1c1cd72bc932f033c076c76fee2400716fe'
                '652306871ba923021ce245e0c778ad9e0e50e87a169b2aea338c4dc8b5c0c716'
                'aabfb6133482e8438b084a09503db27ca546e910f8938f7805a8a76a3b0d0241',
            ), (
                '2d909ca5b33a7fb6f2323ca0bf9de2e4f2266c73da4b6948a517dffa96783e08'
                'ca36411d380f6e8a20011f599d8d73576b2a141a57c0873d089726e24f62c7e0'
                '346ba5fbde68414b0f874b627fb1557a6e9658c8fac96c54f458161ea770982b'
                'fa9fe514120635e5ccb32e8219b9069cb0bf8063fba48d60d649c5af203cccef'
                'ca5dbc2191f81f0215edbdee4ec8c1553e69b83036aca3e840227d317ff6cf8b'
                '968c973f698db1ce59f6871303dcdbe839400c5df4d2e6e505d68890010a4459'
                '6ca9fee77f4db6ea3448d98018437c319fc8c5f4603ef94b04e3a4eafa206b73'
                '91a2640d43128310285bc0f1c7e5060d37c433d663b1c6f01110b9a43f2a74f4',
            ), ('23791c99ab937f1ae5d4988afc9ceca39c290ac90e3da9f243f9a0b1c86c3c32'
                'ab7241d43dfc233da412bab989cf02f15a01fe9ea4b2dc7dc9182117547836d6'
                '9310af3aa005ee3a6deb9602bc676dcc103bf3f7831d64ab844b4785c5c8b4b1'
                '4467e6b5ab6bf34c12f7534e0d5140151c8f28e8276e703dd6332c2bab9e7f4a'
                '495215998ff56e476b81bd6b8d765e1f87da50c22cd52c9afa8c43a6528ab898'
                '6d7a273d9136d5aff5c4d95985d16eeec7380539ef963e0784a0de42b42890df'
                'c83702179f69f5c6eca4630807fbc4ab6241017e0942b15feada0b240e9729bf'
                '33bf456bd419da63302477e147963550a45c6cf60925ff48ad7b309fa158dcb2',
                ))

            expected_sigs = [[sig[0].decode('hex')]
                             for sig in expected_sigs_hex]

            all_signatures = self.client.GetHashSignatures(hashes, keysets)

            self.assertEqual(all_signatures, expected_sigs)
            self.assertRaises(gs.GSNoSuchKey, ctx.List, clean_uri)

        finally:
            # Cleanup when we are over
            ctx.Remove(clean_uri, ignore_missing=True)
Example #30
def _upload_logs(dirpath, gspath):
    """Upload report logs to Google Storage.

    @param dirpath  Path to directory containing the logs.
    @param gspath   Path to GS bucket.
    """
    ctx = gs.GSContext()
    ctx.Copy(dirpath, gspath, recursive=True)
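A one-line usage sketch, with hypothetical paths:

_upload_logs('/tmp/report_logs', 'gs://my-bucket/report_logs/2020-01-01')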