Example #1
0
def archive(options, args):
    build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
        options.build_dir, use_out=chromium_utils.IsLinux())
    build_dir = os.path.join(build_dir, options.target)
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir = None

    # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is not
    # ready with the server-side change.
    if chromium_utils.IsMac():
        subdir = '%s-%s' % (chromium_utils.PlatformName(),
                            options.target.lower())

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #2
0
def Archive(options):
    src_dir = os.path.abspath(options.src_dir)
    build_dir = GetRealBuildDirectory(options.build_dir, options.target,
                                      options.factory_properties)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    webkit_dir, webkit_revision = None, None
    if options.webkit_dir:
        webkit_dir = os.path.join(src_dir, options.webkit_dir)
        webkit_revision = slave_utils.SubversionRevision(webkit_dir)

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)
    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]
    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list)
    if webkit_revision:
        (zip_base,
         zip_ext) = MakeWebKitVersionedArchive(zip_file, build_revision,
                                               webkit_revision, options)
    else:
        (zip_base, zip_ext) = MakeVersionedArchive(zip_file, build_revision,
                                                   options)
    PruneOldArchives(staging_dir, zip_base, zip_ext)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    WriteRevisionFile(staging_dir, build_revision)

    return 0
Example #3
0
def archive(options, args):
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))
    build_dir = os.path.join(src_dir, 'out', options.target)
    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    prefix = options.factory_properties.get('asan_archive_name', 'asan')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #4
0
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to google storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
Example #5
0
def archive(options, args):
    # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
    build_dir = build_directory.GetBuildOutputDirectory()
    src_dir = os.path.abspath(os.path.dirname(build_dir))
    build_dir = os.path.join(build_dir, options.target)

    revision_dir = options.factory_properties.get('revision_dir')
    primary_project = chromium_utils.GetPrimaryProject(options)

    build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
        options, primary_project)
    build_git_commit = GetGitCommit(options, primary_project)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                   '')
    pieces = [chromium_utils.PlatformName(), options.target.lower()]
    if subdir_suffix:
        pieces.append(subdir_suffix)
    subdir = '-'.join(pieces)

    # Components like v8 get a <name>-v8-component-<revision> infix.
    component = ''
    if revision_dir:
        component = '-%s-component' % revision_dir

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
        build_sortkey_branch, build_sortkey_value)
    zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                       options.target.lower(), component,
                                       sortkey_path)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)

    gs_metadata = {
        GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
    }
    if build_sortkey_branch:
        gs_metadata[
            GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
                build_sortkey_branch, build_sortkey_value)
    if build_git_commit:
        gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl,
                                        metadata=gs_metadata)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #6
0
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = (options.staging_dir
                   or slave_utils.GetStagingDir(options.src_dir))
    if not os.path.exists(staging_dir):
        os.makedirs(staging_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    # TODO(robertocn): Remove this if no one other than bisect uses it.
    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)
    path_filter = PathMatcher(options)

    # Expand one level deep so that secondary toolchains can be filtered.
    for i in xrange(len(root_files) - 1, -1, -1):
        path = root_files[i]
        # Don't expand directories that will be filtered out.
        if not path_filter.Match(path):
            continue
        abs_path = os.path.join(build_dir, path)
        if os.path.isdir(abs_path):
            root_files[i:i + 1] = [
                os.path.join(path, f) for f in os.listdir(abs_path)
            ]

    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Switch layout tests to use files from 'gen/layout_test_data'
    # and remove this.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    layout_test_data_files = _LayoutTestFiles(build_dir)
    print 'Include layout test data: %s' % layout_test_data_files
    zip_file_list.extend(layout_test_data_files)

    zip_file = MakeUnversionedArchive(build_dir,
                                      staging_dir,
                                      zip_file_list,
                                      unversioned_base_name,
                                      strip_files=options.strip_files)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to google storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl,
                                        options.gsutil_py_path)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
Example #7
0
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix,
                                   chromium_utils.PlatformName(),
                                   options.target.lower(),
                                   component,
                                   build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
Example #8
0
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(options.src_dir)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    append_deps_patch_sha = options.factory_properties.get(
        'append_deps_patch_sha')

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.build_properties,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not append_deps_patch_sha))

    if append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_js_files = MojomJSFiles(build_dir)
    print 'Include mojom JavaScript files: %s' % mojom_js_files
    zip_file_list.extend(mojom_js_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name,
                                      options.path_filter)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = max(0, int(options.factory_properties.get('prune_limit',
                                                            10)))
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    build_url = (options.build_url
                 or options.factory_properties.get('build_url', ''))
    if build_url.startswith('gs://'):
        gs_acl = options.factory_properties.get('gs_acl')
        UploadToGoogleStorage(versioned_file, revision_file, build_url, gs_acl)

    return 0