Example #1
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to Google Storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
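Archive() pulls all of its configuration from a single options object. A minimal driver sketch follows, assuming an optparse entry point; only a few of the attributes read above are wired up, and the flag names are inferred from those attribute accesses rather than taken from the real script.

# Driver sketch (assumed wiring, not the real script's option parser).
import optparse

def _ParseOptions():
    parser = optparse.OptionParser()
    parser.add_option('--src-dir', default='src')     # -> options.src_dir
    parser.add_option('--target', default='Release')  # -> options.target
    parser.add_option('--build-url', default='')      # -> options.build_url
    parser.add_option('--build-revision')             # -> options.build_revision
    # The remaining flags Archive() reads (--master-name, --build-number,
    # --gs-acl, --slave-name, ...) would be declared the same way.
    options, _ = parser.parse_args()
    return options

if __name__ == '__main__':
    urls = Archive(_ParseOptions())
    print 'zip_url: %s' % urls['zip_url']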
Example #2
  def testWebKitDir(self):
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        _BUILD_DIR, webkit_dir=_BUILD_DIR)
    self.assertTrue(build_revision > 0)
    self.assertTrue(webkit_revision > 0)
Example #3
  def testRevisionDir(self):
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        _BUILD_DIR, revision_dir=_BUILD_DIR)
    self.assertTrue(build_revision > 0)
    self.assertEquals(None, webkit_revision)
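Together, these tests pin down the contract of slave_utils.GetBuildRevisions: it returns a (build_revision, webkit_revision) pair, and webkit_revision is populated only when webkit_dir is passed. A sketch of that contract, on the assumption that the behavior shown in the two tests generalizes:

# Contract sketch inferred from the tests above; _BUILD_DIR is the
# checkout path the test module defines elsewhere.
build_rev, webkit_rev = slave_utils.GetBuildRevisions(
    _BUILD_DIR, webkit_dir=_BUILD_DIR)
assert build_rev > 0       # always derived from the main checkout
assert webkit_rev > 0      # populated because webkit_dir was passed

build_rev, webkit_rev = slave_utils.GetBuildRevisions(
    _BUILD_DIR, revision_dir=_BUILD_DIR)
assert webkit_rev is None  # no webkit_dir, so no webkit revision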
Example #4
def real_main(options):
    """ Download a build, extract it to build\BuildDir\full-build-win32
      and rename it to build\BuildDir\Target
  """
    abs_build_dir = os.path.abspath(
        build_directory.GetBuildOutputDirectory(options.src_dir))
    target_build_output_dir = os.path.join(abs_build_dir, options.target)

    # Generic name for the archive.
    archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

    # Just take the zip off the name for the output directory name.
    output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

    src_dir = os.path.dirname(abs_build_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision
    base_url, url = GetBuildUrl(options, build_revision, webkit_revision)
    archive_name = os.path.basename(base_url)

    if url.startswith('gs://'):
        handler = GSHandler(url=url, archive_name=archive_name)
    else:
        handler = WebHandler(url=url, archive_name=archive_name)

    # We try to download and extract 3 times.
    for tries in range(1, 4):
        print 'Try %d: Fetching build from %s...' % (tries, url)

        failure = False

        # Check if the url exists.
        if not handler.is_present():
            print '%s is not found' % url
            failure = True

            # When 'halt_on_missing_build' is set in factory_properties and
            # 'revision' is set in build_properties, we assume the build was
            # triggered automatically, so we halt on a missing build zip. If
            # the build was forced instead, we keep trying by looking for the
            # latest build that's available.
            if (options.factory_properties.get('halt_on_missing_build', False)
                    and 'revision' in options.build_properties
                    and options.build_properties['revision'] != ''):
                return slave_utils.ERROR_EXIT_CODE

        # If the url is valid, we download the file.
        if not failure:
            if not handler.download():
                failure = True

        # If the versioned url failed, we try to get the latest build.
        if failure:
            if url.startswith('gs://'):
                continue
            else:
                print 'Fetching latest build at %s' % base_url
                handler.url = base_url
                if not handler.download():
                    continue

        print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
        try:
            chromium_utils.RemoveDirectory(target_build_output_dir)
            chromium_utils.ExtractZip(archive_name, abs_build_dir)
            # For Chrome builds, the build will be stored in chrome-win32.
            if 'full-build-win32' in output_dir:
                chrome_dir = output_dir.replace('full-build-win32',
                                                'chrome-win32')
                if os.path.exists(chrome_dir):
                    output_dir = chrome_dir

            print 'Moving build from %s to %s' % (output_dir,
                                                  target_build_output_dir)
            shutil.move(output_dir, target_build_output_dir)
        except (OSError, IOError, chromium_utils.ExternalError):
            print 'Failed to extract the build.'
            # Print out the traceback in a nice format
            traceback.print_exc()
            # Try again...
            continue

        # If we got the latest build, then figure out its revision number.
        if failure:
            print "Trying to determine the latest build's revision number..."
            try:
                build_revision_file_name = os.path.join(
                    target_build_output_dir,
                    chromium_utils.FULL_BUILD_REVISION_FILENAME)
                build_revision_file = open(build_revision_file_name, 'r')
                print 'Latest build is revision: %s' % (
                    build_revision_file.read())
                build_revision_file.close()
            except IOError:
                print "Could not determine the latest build's revision number"

        if failure:
            # We successfully extracted the archive, but it was the generic one.
            return slave_utils.WARNING_EXIT_CODE
        return 0

    # If we get here, that means that it failed 3 times. We return a failure.
    return slave_utils.ERROR_EXIT_CODE
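The loop above interleaves three concerns: at most three attempts, a fallback from the versioned URL to the latest generic build, and three distinct exit codes (success, a warning when only the generic archive could be used, an error when every try failed). A control-flow sketch with stand-in steps; the names below are placeholders rather than the real module's API:

# Control-flow sketch of the retry/fallback loop above; the three step
# callables are stand-ins for the handler and extraction code.
SUCCESS, WARNING, ERROR = 0, 1, 2  # stand-ins for slave_utils' exit codes

def fetch_build(fetch_versioned, fetch_latest, extract, max_tries=3):
    for _ in range(max_tries):
        fell_back = False
        if not fetch_versioned():
            if not fetch_latest():  # no fallback possible: burn this try
                continue
            fell_back = True
        if not extract():           # bad archive: burn this try as well
            continue
        return WARNING if fell_back else SUCCESS
    return ERROR

Each continue consumes one of the tries whether the failure was in fetching or extracting, which matches the loop above.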
Example #5
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = (options.staging_dir
                   or slave_utils.GetStagingDir(options.src_dir))
    if not os.path.exists(staging_dir):
        os.makedirs(staging_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    # TODO(robertocn): Remove this if no one other than bisect uses it.
    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)
    path_filter = PathMatcher(options)

    # Expand one level deep so that secondary toolchains can be filtered.
    for i in xrange(len(root_files) - 1, -1, -1):
        path = root_files[i]
        # Don't expand directories that will be filtered out.
        if not path_filter.Match(path):
            continue
        abs_path = os.path.join(build_dir, path)
        if os.path.isdir(abs_path):
            root_files[i:i + 1] = [
                os.path.join(path, f) for f in os.listdir(abs_path)
            ]

    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Switch layout tests to use files from 'gen/layout_test_data'
    # and remove this.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    layout_test_data_files = _LayoutTestFiles(build_dir)
    print 'Include layout test data: %s' % layout_test_data_files
    zip_file_list.extend(layout_test_data_files)

    zip_file = MakeUnversionedArchive(build_dir,
                                      staging_dir,
                                      zip_file_list,
                                      unversioned_base_name,
                                      strip_files=options.strip_files)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to Google Storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl,
                                        options.gsutil_py_path)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
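Relative to Example #1, this version expands every matching top-level directory one level (not just initial\) so that secondary-toolchain output can be filtered. The backwards iteration is what makes the in-place splice safe: replacing root_files[i:i + 1] with a directory's children shifts all later indices, and walking from the end guarantees the shifted indices have already been visited. A self-contained sketch of that splice:

import os

def expand_one_level(build_dir, root_files, match):
    # Walk backwards so splicing children into the list never disturbs
    # the indices of entries not yet visited.
    for i in range(len(root_files) - 1, -1, -1):
        path = root_files[i]
        if not match(path):  # don't expand what will be filtered out anyway
            continue
        abs_path = os.path.join(build_dir, path)
        if os.path.isdir(abs_path):
            root_files[i:i + 1] = [
                os.path.join(path, f) for f in os.listdir(abs_path)]

# Usage mirroring the loop above:
#   expand_one_level(build_dir, root_files, path_filter.Match)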
Example #6
def real_main(options):
  """ Download a build, extract it to build\\BuildDir\\full-build-win32
      and rename it to build\\BuildDir\\Target
  """
  abs_build_dir = os.path.abspath(
      build_directory.GetBuildOutputDirectory(options.src_dir))
  target_build_output_dir = os.path.join(abs_build_dir, options.target)

  # Generic name for the archive.
  archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

  # Just take the zip off the name for the output directory name.
  output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

  src_dir = os.path.dirname(abs_build_dir)
  if not options.build_revision and not options.build_archive_url:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision
  url, archive_name = GetBuildUrl(options, build_revision, webkit_revision)
  if archive_name is None:
    archive_name = 'build.zip'
    base_url = None
  else:
    base_url = '/'.join(url.split('/')[:-1] + [archive_name])

  if url.startswith('gs://'):
    handler = GSHandler(url=url, archive_name=archive_name)
  else:
    handler = WebHandler(url=url, archive_name=archive_name)

  # We try to download and extract 3 times.
  for tries in range(1, 4):
    print 'Try %d: Fetching build from %s...' % (tries, url)

    failure = False

    # Try the versioned url first.
    if not handler.download():
      if options.halt_on_missing_build:
        return slave_utils.ERROR_EXIT_CODE
      failure = True

    # If the versioned url failed, we try to get the latest build.
    if failure:
      if url.startswith('gs://') or not base_url:
        continue
      else:
        print 'Fetching latest build at %s' % base_url
        base_handler = handler.__class__(base_url, handler.archive_name)
        if not base_handler.download():
          continue

    print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
    try:
      chromium_utils.RemoveDirectory(target_build_output_dir)
      chromium_utils.ExtractZip(archive_name, abs_build_dir)
      # For Chrome builds, the build will be stored in chrome-win32.
      if 'full-build-win32' in output_dir:
        chrome_dir = output_dir.replace('full-build-win32', 'chrome-win32')
        if os.path.exists(chrome_dir):
          output_dir = chrome_dir

      print 'Moving build from %s to %s' % (output_dir, target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
    except (OSError, IOError, chromium_utils.ExternalError):
      print 'Failed to extract the build.'
      # Print out the traceback in a nice format
      traceback.print_exc()
      # Try again...
      continue

    # If we got the latest build, then figure out its revision number.
    if failure:
      print "Trying to determine the latest build's revision number..."
      try:
        build_revision_file_name = os.path.join(
            target_build_output_dir,
            chromium_utils.FULL_BUILD_REVISION_FILENAME)
        build_revision_file = open(build_revision_file_name, 'r')
        print 'Latest build is revision: %s' % build_revision_file.read()
        build_revision_file.close()
      except IOError:
        print "Could not determine the latest build's revision number"

    if failure:
      # We successfully extracted the archive, but it was the generic one.
      return slave_utils.WARNING_EXIT_CODE
    return 0

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
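This real_main drops the is_present() probe and the factory_properties lookup in favor of a plain halt_on_missing_build option, and it reconstructs base_url from the versioned URL. Both handler classes sit behind the same small surface; a stand-in consistent with the call sites in Examples #4 and #6 (the real implementations are not shown in these excerpts):

class HandlerSketch(object):
    # Stand-in mirroring how GSHandler/WebHandler are used above; the
    # method bodies are placeholders, not the real implementations.

    def __init__(self, url, archive_name):
        self.url = url                    # may be retargeted at a base_url
        self.archive_name = archive_name  # local file name for the download

    def is_present(self):
        # Example #4 probes this before downloading.
        raise NotImplementedError

    def download(self):
        # Fetch self.url into self.archive_name; return True on success.
        raise NotImplementedError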
Example #7
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix,
                                     chromium_utils.PlatformName(),
                                     options.target.lower(),
                                     component,
                                     build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
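The archive name concatenates prefix, platform, target, an optional component infix, and the revision. A worked example of the format string, with assumed inputs:

# Worked example of the naming scheme above; all inputs are assumptions.
prefix, platform, target = 'cf_archive', 'linux', 'release'
component = '-v8-component'  # '' unless revision_dir was set
build_revision = 123456
print '%s-%s-%s%s-%s' % (prefix, platform, target, component, build_revision)
# -> cf_archive-linux-release-v8-component-123456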
Example #8
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(options.src_dir)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    append_deps_patch_sha = options.factory_properties.get(
        'append_deps_patch_sha')

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.build_properties,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not append_deps_patch_sha))

    if append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_js_files = MojomJSFiles(build_dir)
    print 'Include mojom JavaScript files: %s' % mojom_js_files
    zip_file_list.extend(mojom_js_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name,
                                      options.path_filter)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = max(0, int(options.factory_properties.get('prune_limit',
                                                            10)))
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    build_url = (options.build_url
                 or options.factory_properties.get('build_url', ''))
    if build_url.startswith('gs://'):
        gs_acl = options.factory_properties.get('gs_acl')
        UploadToGoogleStorage(versioned_file, revision_file, build_url, gs_acl)

    return 0
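Every Archive() variant above funnels file selection through PathMatcher(options): constructed once, printed for logging, and queried per relative path. The class itself is not part of these excerpts; a stand-in consistent with that usage, with fnmatch-style exclusion globs as an assumed rule set:

import fnmatch

class PathMatcherSketch(object):
    # Stand-in consistent with the Match()/print usage above. The real
    # PathMatcher's rules are not shown in these excerpts; exclusion
    # globs are an assumption.

    def __init__(self, exclusions):
        self.exclusions = exclusions  # e.g. ['*.ilk', '*.obj']

    def Match(self, path):
        return not any(
            fnmatch.fnmatch(path, pat) for pat in self.exclusions)

    def __str__(self):
        return 'PathMatcherSketch(exclusions=%r)' % (self.exclusions,)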