def _GsUpload(local_file, remote_file, acl):
  """Upload to GS bucket.

  Args:
    args: a tuple of three arguments that contains local_file, remote_file, and
          the acl used for uploading the file.

  Returns:
    Return the arg tuple of two if the upload failed
  """
  CANNED_ACLS = ['public-read', 'private', 'bucket-owner-read',
                 'authenticated-read', 'bucket-owner-full-control',
                 'public-read-write']
  if acl in CANNED_ACLS:
    cmd = [gs.GSUTIL_BIN, 'cp', '-a', acl, local_file, remote_file]
    acl_cmd = None
  else:
    # For private uploads we assume that the overlay board is set up properly
    # and a googlestore_acl.xml is present. Otherwise, this script errors.
    cmd = [gs.GSUTIL_BIN, 'cp', '-a', 'private', local_file, remote_file]
    acl_cmd = [gs.GSUTIL_BIN, 'setacl', acl, remote_file]

  cros_build_lib.RunCommandWithRetries(_RETRIES, cmd, print_cmd=True,
                                       sleep=_SLEEP_TIME,
                                       redirect_stdout=True,
                                       redirect_stderr=True)
  if acl_cmd:
    # Apply the passed in ACL xml file to the uploaded object.
    cros_build_lib.RunCommandWithRetries(_RETRIES, acl_cmd, print_cmd=False,
                                         sleep=_SLEEP_TIME)
Example #2
    def GetLatestSHA1ForBranch(self, project, branch):
        """Finds the latest commit hash for a repository/branch.

        Args:
          project: The Gerrit project to query.
          branch: The branch to query.

        Returns:
          The latest commit hash for this patch's repo/branch.

        Raises:
          FailedToReachGerrit if we fail to contact gerrit.
        """
        ssh_url_project = '%s/%s' % (self.ssh_url, project)
        try:
            result = cros_build_lib.RunCommandWithRetries(
                3, [
                    'git', 'ls-remote', ssh_url_project,
                    'refs/heads/%s' % (branch, )
                ],
                redirect_stdout=True,
                print_cmd=self.print_cmd)
            if result:
                return result.output.split()[0]
        except cros_build_lib.RunCommandError as e:
            # Fall out to Gerrit error.
            logging.error('Failed to contact git server with %s', e)

        raise FailedToReachGerrit(
            'Could not contact gerrit to get latest sha1')
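
git ls-remote prints lines of the form '<sha1>\t<ref>', so the method returns the first whitespace-separated token of the output. A hypothetical call, assuming helper is an instance of the class this method belongs to:

# Returns the tip commit hash of refs/heads/master, or raises
# FailedToReachGerrit after three failed attempts to reach the server.
tip = helper.GetLatestSHA1ForBranch('chromiumos/chromite', 'master')
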
Example #3
def CloneGitRepo(working_dir, repo_url, reference=None, bare=False,
                 mirror=False, retries=constants.SYNC_RETRIES, depth=None):
  """Clone given git repo
  Args:
    repo_url: git repo to clone
    repo_dir: location where it should be cloned to
    reference: If given, pathway to a git repository to access git objects
      from.  Note that the reference must exist as long as the newly created
      repo is to be usable.
    bare: Clone a bare checkout.
    mirror: Clone a mirror checkout.
    retries: If error code 128 is encountered, how many times to retry.  When
      128 is returned from git, it's essentially a server error- specifically
      common to manifest-versions and gerrit.
    depth: If given, do a shallow clone limiting the objects pulled to just
      that # of revs of history.  This option is mutually exclusive to
      reference.
  """
  osutils.SafeMakedirs(working_dir)
  cmd = ['git', 'clone', repo_url, working_dir]
  if reference:
    if depth:
      raise ValueError("reference and depth are mutually exclusive "
                       "options; please pick one or the other.")
    cmd += ['--reference', reference]
  if bare:
    cmd += ['--bare']
  if mirror:
    cmd += ['--mirror']
  if depth:
    cmd += ['--depth', str(int(depth))]
  cros_build_lib.RunCommandWithRetries(
      retries, cmd, cwd=working_dir, redirect_stdout=True, redirect_stderr=True,
      retry_on=[128])
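
A couple of hypothetical invocations of CloneGitRepo; the destination paths and repository URL are placeholders, and reference/depth cannot be combined:

# Full clone, retried on git exit code 128 (server-side errors).
CloneGitRepo('/tmp/chromite',
             'https://chromium.googlesource.com/chromiumos/chromite')
# Shallow clone with one revision of history (mutually exclusive with
# reference=).
CloneGitRepo('/tmp/chromite-shallow',
             'https://chromium.googlesource.com/chromiumos/chromite',
             depth=1)
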
Example #4
    def _UploadStatus(self,
                      version,
                      status,
                      message=None,
                      fail_if_exists=False):
        """Upload build status to Google Storage.

        Args:
          version: Version number to use. Must be a string.
          status: Status string.
          message: Additional message explaining the status.
          fail_if_exists: If set, fail if the status already exists.
        """
        cmd = [gs.GSUTIL_BIN]
        if fail_if_exists:
            # This HTTP header tells Google Storage to return the
            # PreconditionFailed error message if the file already exists.
            cmd += ['-h', 'x-goog-if-generation-match: 0']
        url = BuildSpecsManager._GetStatusUrl(self.build_name, version)
        cmd += ['cp', '-', url]

        # Create a BuilderStatus object and pickle it.
        data = cPickle.dumps(dict(status=status, message=message))

        if self.dry_run:
            logging.info('Would have run: %s', ' '.join(cmd))
        else:
            # TODO(davidjames): Use chromite.lib.gs here.
            cros_build_lib.RunCommandWithRetries(3,
                                                 cmd,
                                                 redirect_stdout=True,
                                                 redirect_stderr=True,
                                                 input=data)
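
The notable mechanism above is streaming the pickled payload to gsutil over stdin ('cp -' combined with input=data), while the x-goog-if-generation-match: 0 header makes the copy fail if the object already exists. A minimal standalone sketch of the same pattern, assuming url is a writable gs:// URI and gs/cros_build_lib are imported as in the original module:

import cPickle

data = cPickle.dumps(dict(status='pass', message=None))
cmd = [gs.GSUTIL_BIN, '-h', 'x-goog-if-generation-match: 0', 'cp', '-', url]
# input= feeds the pickled bytes to gsutil's stdin; the object is only
# created if it does not already exist.
cros_build_lib.RunCommandWithRetries(3, cmd, redirect_stdout=True,
                                     redirect_stderr=True, input=data)
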
Example #5
    def GetBuildStatus(builder, version, retries=3):
        """Returns a BuilderStatus instance for the given the builder.

    Args:
      builder: Builder to look at.
      version: Version string.
      retries: Number of retries for getting the status.

    Returns:
      A BuilderStatus instance containing the builder status and any optional
      message associated with the status passed by the builder.
    """
        url = BuildSpecsManager._GetStatusUrl(builder, version)
        cmd = [gs.GSUTIL_BIN, 'cat', url]
        try:
            # TODO(davidjames): Use chromite.lib.gs here.
            result = cros_build_lib.RunCommandWithRetries(
                retries,
                cmd,
                redirect_stdout=True,
                redirect_stderr=True,
                debug_level=logging.DEBUG)
        except cros_build_lib.RunCommandError as ex:
            # If the file does not exist, InvalidUriError is returned.
            if ex.result.error and ex.result.error.startswith(
                    'InvalidUriError:'):
                return None
            raise
        return BuilderStatus(**cPickle.loads(result.output))
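
GetBuildStatus is the read side of _UploadStatus above: it cats the same object and expands the unpickled dict into BuilderStatus keyword arguments, so the dict keys written on upload must match BuilderStatus's constructor. A minimal sketch of that payload round trip, assuming BuilderStatus accepts status and message:

import cPickle

payload = cPickle.dumps(dict(status='pass', message='build ok'))
# Equivalent to the last line of GetBuildStatus once the payload is fetched.
status = BuilderStatus(**cPickle.loads(payload))
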
Example #6
 def GetLatestSHA1ForBranch(self, project, branch):
     url = 'https://%s/a/%s' % (self.host, project)
     cmd = ['git', 'ls-remote', url, 'refs/heads/%s' % branch]
     try:
         result = cros_build_lib.RunCommandWithRetries(
             3, cmd, redirect_stdout=True, print_cmd=self.print_cmd)
         if result:
             return result.output.split()[0]
     except cros_build_lib.RunCommandError:
         logging.error('Command "%s" failed.',
                       ' '.join(map(repr, cmd)),
                       exc_info=True)
Example #7
 def testGetLatestSHA1ForBranch(self):
   """Verifies we can return the correct sha1 from mock data."""
   self.mox.StubOutWithMock(cros_build_lib, 'RunCommandWithRetries')
   my_hash = 'sadfjaslfkj2135'
   my_branch = 'master'
   result = self.mox.CreateMock(cros_build_lib.CommandResult)
   result.returncode = 0
   result.output = '   '.join([my_hash, my_branch])
   cros_build_lib.RunCommandWithRetries(
       3, ['git', 'ls-remote',
           'ssh://gerrit.chromium.org:29418/tacos/chromite',
           'refs/heads/master'],
       redirect_stdout=True, print_cmd=True).AndReturn(result)
   self.mox.ReplayAll()
   helper = self._GetHelper()
   self.assertEqual(helper.GetLatestSHA1ForBranch('tacos/chromite',
                                                  my_branch), my_hash)
   self.mox.VerifyAll()
Example #8
  def Sync(self, local_manifest=None, jobs=None, cleanup=True,
           all_branches=False, network_only=False):
    """Sync/update the source.  Changes manifest if specified.

    Args:
      local_manifest: If set, sync the checkout to the given manifest.
        DEFAULT_MANIFEST may be used to set it back to the default manifest.
      jobs: May be set to override the default sync parallelism defined by
        the manifest.
      cleanup: If true, repo referencing is rebuilt, insteadOf configuration is
        wiped, and appropriate remotes are set up.  Should only be turned off
        by code that knows the repo is clean.
      all_branches: If False (the default), a repo sync -c is performed; this
        saves sync time by grabbing only what is needed for the
        manifest-specified branch.
      network_only: If true, perform only the network half of the sync; skip
        the checkout.  Primarily of use to validate a manifest (although
        if the manifest has bad copyfile statements, skipping the checkout
        means the broken copyfile tag won't be spotted), or of use when the
        invoking code is fine with operating on bare repos, i.e.
        .repo/projects/*.
    """
    try:
      # Always re-initialize to the current branch.
      self.Initialize(local_manifest)
      # Fix existing broken mirroring configurations.
      self._EnsureMirroring()

      if cleanup:
        configure_repo.FixBrokenExistingRepos(self.directory)

      cmd = ['repo', '--time', 'sync']
      if jobs:
        cmd += ['--jobs', str(jobs)]
      if not all_branches:
        cmd.append('-c')
      # Do the network half of the sync; retry as necessary to get the content.
      cros_build_lib.RunCommandWithRetries(constants.SYNC_RETRIES, cmd + ['-n'],
                                           cwd=self.directory)

      if network_only:
        return

      # Do the local sync; note that there are a couple of corner cases where
      # the new manifest cannot transition from the old checkout cleanly,
      # primarily involving git submodules.  Thus we intercept, and do
      # a forced wipe, then a retry.
      try:
        cros_build_lib.RunCommand(cmd + ['-l'], cwd=self.directory)
      except cros_build_lib.RunCommandError:
        manifest = git.ManifestCheckout.Cached(self.directory)
        targets = set(project['path'].split('/', 1)[0]
                      for project in manifest.projects.itervalues())
        if not targets:
          # No directories to wipe, thus nothing we can fix.
          raise
        cros_build_lib.SudoRunCommand(['rm', '-rf'] + sorted(targets),
                                      cwd=self.directory)

        # Retry the sync now; if it fails, let the exception propagate.
        cros_build_lib.RunCommand(cmd + ['-l'], cwd=self.directory)

      # Setup gerrit remote for any new repositories.
      configure_repo.SetupGerritRemote(self.directory)

      # We do a second run to fix any new repositories created by repo to
      # use relative object pathways.  Note that cros_sdk also triggers the
      # same cleanup; we kick it here as well, erring on the side of caution.
      self._EnsureMirroring(True)
      self._DoCleanup()

    except cros_build_lib.RunCommandError as e:
      err_msg = e.Stringify(error=False, output=False)
      logging.error(err_msg)
      raise SrcCheckOutException(err_msg)
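
The method above splits repo sync into a retried network fetch and a local checkout that is wiped and retried once on failure. A minimal sketch of just the two-phase pattern, assuming directory is an existing repo checkout and constants/cros_build_lib are imported as in the original module:

cmd = ['repo', '--time', 'sync', '-c']
# Network half only: fetch objects, retrying transient server failures.
cros_build_lib.RunCommandWithRetries(constants.SYNC_RETRIES, cmd + ['-n'],
                                     cwd=directory)
# Local half: update the working trees from the already-fetched objects.
cros_build_lib.RunCommand(cmd + ['-l'], cwd=directory)
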