Example #1
def archive(options, args):
    build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
        options.build_dir, use_out=chromium_utils.IsLinux())
    build_dir = os.path.join(build_dir, options.target)
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir = None

    # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is not
    # ready with the server-side change.
    if chromium_utils.IsMac():
        subdir = '%s-%s' % (chromium_utils.PlatformName(),
                            options.target.lower())

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #2
def archive_layout(options, args):
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(filename)s:%(lineno)-3d'
                        ' %(levelname)s %(message)s',
                        datefmt='%y%m%d %H:%M:%S')
    chrome_dir = os.path.abspath(options.build_dir)
    results_dir_basename = os.path.basename(options.results_dir)
    if options.results_dir is not None:
        options.results_dir = os.path.abspath(
            os.path.join(options.build_dir, options.results_dir))
    else:
        options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
    print 'Archiving results from %s' % options.results_dir
    staging_dir = slave_utils.GetStagingDir(chrome_dir)
    print 'Staging in %s' % staging_dir

    (actual_file_list,
     diff_file_list) = _CollectArchiveFiles(options.results_dir)
    zip_file = chromium_utils.MakeZip(staging_dir, results_dir_basename,
                                      actual_file_list, options.results_dir)[1]
    full_results_json = os.path.join(options.results_dir, 'full_results.json')

    # Extract the build name of this slave (e.g., 'chrome-release') from its
    # configuration file if not provided as a param.
    build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
    build_name = re.sub('[ .()]', '_', build_name)

    last_change = str(slave_utils.SubversionRevision(chrome_dir))
    print 'last change: %s' % last_change
    print 'build name: %s' % build_name
    print 'host name: %s' % socket.gethostname()

    # Where to save layout test results.
    dest_parent_dir = os.path.join(config.Archive.www_dir_base,
                                   results_dir_basename.replace('-', '_'),
                                   build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    if gs_bucket:
        gs_base = '/'.join([gs_bucket, build_name, last_change])
        gs_acl = options.factory_properties.get('gs_acl', None)
        slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl)
        slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl)
    else:
        slave_utils.MaybeMakeDirectoryOnArchiveHost(dest_dir)
        slave_utils.CopyFileToArchiveHost(zip_file, dest_dir)
        slave_utils.CopyFileToArchiveHost(full_results_json, dest_dir)
        # Not supported on Google Storage yet.
        _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir,
                                      diff_file_list, options)
    return 0
Example #3
def main(argv):
    o3d_dir = os.path.join(os.getcwd(), 'o3d')
    staging_dir = slave_utils.GetStagingDir(o3d_dir)

    # Find builder name and revision #s.
    builder_name = slave_utils.SlaveBuildName(o3d_dir)
    o3d_rev = str(slave_utils.SubversionRevision(o3d_dir))
    platform = chromium_utils.PlatformName()

    # Upload zip.
    local_zip = os.path.join(staging_dir,
                             'full-build-' + platform + '_' + o3d_rev + '.zip')
    remote_zip = 'snapshots/o3d/' + o3d_rev + '/' + builder_name + '.zip'

    archive_file.UploadFile(local_zip, remote_zip)
    return 0
Example #4
def Archive(options):
    src_dir = os.path.abspath(options.src_dir)
    build_dir = GetRealBuildDirectory(options.build_dir, options.target,
                                      options.factory_properties)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    webkit_dir, webkit_revision = None, None
    if options.webkit_dir:
        webkit_dir = os.path.join(src_dir, options.webkit_dir)
        webkit_revision = slave_utils.SubversionRevision(webkit_dir)

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)
    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]
    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list)
    if webkit_revision:
        (zip_base,
         zip_ext) = MakeWebKitVersionedArchive(zip_file, build_revision,
                                               webkit_revision, options)
    else:
        (zip_base, zip_ext) = MakeVersionedArchive(zip_file, build_revision,
                                                   options)
    PruneOldArchives(staging_dir, zip_base, zip_ext)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    WriteRevisionFile(staging_dir, build_revision)

    return 0
Example #5
def archive(options, args):
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))
    build_dir = os.path.join(src_dir, 'out', options.target)
    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    prefix = options.factory_properties.get('asan_archive_name', 'asan')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #6
def main(options, args):
    # Create some variables
    src_dir = os.path.abspath(options.src_dir)
    build_dir = os.path.dirname(options.build_dir)
    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    build_version = str(build_revision)

    if chromium_utils.IsMac() or chromium_utils.IsLinux():
        # Files are created with umask 077 by default; make sure the staging
        # dir can be fetched from by recursively chmoding back up to the root
        # before pushing to the web server.
        a_path = staging_dir
        while a_path != '/':
            current_permissions = os.stat(a_path)[0]
            if current_permissions & 0555 == 0555:
                break
            print 'Fixing permissions (%o) for \'%s\'' % (current_permissions,
                                                          a_path)
            os.chmod(a_path, current_permissions | 0555)
            a_path = os.path.dirname(a_path)
Example #7
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to google storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
Example #8
    def __init__(self, options, build_revision):
        """Sets a number of file and directory paths for convenient use."""

        self.options = options
        self._src_dir = os.path.abspath(options.src_dir)
        self._chrome_dir = os.path.join(self._src_dir, 'chrome')

        build_dir = build_directory.GetBuildOutputDirectory()
        self._build_dir = os.path.join(build_dir, options.target)
        if chromium_utils.IsWindows():
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'win')
        elif chromium_utils.IsLinux():
            # On Linux, we might have built for chromeos.  Archive the same.
            if (options.factory_properties.get('chromeos', None)
                    or slave_utils.GypFlagIsOn(options, 'chromeos')):
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'chromeos')
            # Or, we might have built for Android.
            elif options.factory_properties.get('target_os') == 'android':
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'android')
            else:
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'linux')
        elif chromium_utils.IsMac():
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'mac')
        else:
            raise NotImplementedError(
                'Platform "%s" is not currently supported.' % sys.platform)
        self._staging_dir = (options.staging_dir
                             or slave_utils.GetStagingDir(self._src_dir))
        if not os.path.exists(self._staging_dir):
            os.makedirs(self._staging_dir)

        self._symbol_dir_base = options.dirs['symbol_dir_base']
        self._www_dir_base = options.dirs['www_dir_base']

        if options.build_name:
            self._build_name = options.build_name
        else:
            self._build_name = slave_utils.SlaveBuildName(self._src_dir)

        self._symbol_dir_base = os.path.join(self._symbol_dir_base,
                                             self._build_name)
        self._www_dir_base = os.path.join(self._www_dir_base, self._build_name)

        self._version_file = os.path.join(self._chrome_dir, 'VERSION')

        self._chromium_revision = chromium_utils.GetBuildSortKey(options)[1]

        self._v8_revision = chromium_utils.GetBuildSortKey(options,
                                                           project='v8')[1]
        self._v8_revision_git = chromium_utils.GetGitCommit(options,
                                                            project='v8')

        self.last_change_file = os.path.join(self._staging_dir, 'LAST_CHANGE')
        # The REVISIONS file will record the revisions information of the main
        # components Chromium/WebKit/V8.
        self.revisions_path = os.path.join(self._staging_dir, 'REVISIONS')
        self._build_revision = build_revision
        self._build_path_component = str(self._build_revision)

        # Will be initialized in GetLastBuildRevision.
        self.last_chromium_revision = None
        self.last_v8_revision = None

        self._files_file = os.path.join(self._tool_dir,
                                        archive_utils.FILES_FILENAME)
        self._test_files = self.BuildOldFilesList(TEST_FILE_NAME)

        self._dual_upload = options.factory_properties.get(
            'dual_upload', False)
        self._archive_files = None
Example #9
    def __init__(self, options, build_revision):
        """Sets a number of file and directory paths for convenient use."""

        self.options = options
        self._src_dir = os.path.abspath(options.src_dir)
        self._chrome_dir = os.path.join(self._src_dir, 'chrome')
        # TODO: This code should not be grabbing so deeply into WebKit.
        #       Worse, this code ends up looking at top-of-tree WebKit
        #       instead of the revision in DEPS.
        self._webkit_dir = os.path.join(self._src_dir, 'third_party', 'WebKit',
                                        'Source')
        self._v8_dir = os.path.join(self._src_dir, 'v8')

        build_dir = build_directory.GetBuildOutputDirectory()
        self._build_dir = os.path.join(build_dir, options.target)
        if chromium_utils.IsWindows():
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'win')
        elif chromium_utils.IsLinux():
            # On Linux, we might have built for chromeos.  Archive the same.
            if (options.factory_properties.get('chromeos', None)
                    or slave_utils.GypFlagIsOn(options, 'chromeos')):
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'chromeos')
            # Or, we might have built for Android.
            elif options.factory_properties.get('target_os') == 'android':
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'android')
            else:
                self._tool_dir = os.path.join(self._chrome_dir, 'tools',
                                              'build', 'linux')
        elif chromium_utils.IsMac():
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'mac')
        else:
            raise NotImplementedError(
                'Platform "%s" is not currently supported.' % sys.platform)
        self._staging_dir = slave_utils.GetStagingDir(self._src_dir)

        self._symbol_dir_base = options.dirs['symbol_dir_base']
        self._www_dir_base = options.dirs['www_dir_base']

        if options.build_name:
            self._build_name = options.build_name
        else:
            self._build_name = slave_utils.SlaveBuildName(self._src_dir)

        self._symbol_dir_base = os.path.join(self._symbol_dir_base,
                                             self._build_name)
        self._www_dir_base = os.path.join(self._www_dir_base, self._build_name)

        self._version_file = os.path.join(self._chrome_dir, 'VERSION')

        if options.default_chromium_revision:
            self._chromium_revision = options.default_chromium_revision
        else:
            self._chromium_revision = slave_utils.GetHashOrRevision(
                os.path.dirname(
                    self._chrome_dir))  # src/ instead of src/chrome
        if options.default_webkit_revision:
            self._webkit_revision = options.default_webkit_revision
        else:
            self._webkit_revision = slave_utils.GetHashOrRevision(
                os.path.dirname(
                    self._webkit_dir))  # WebKit/ instead of WebKit/Source
        if options.default_v8_revision:
            self._v8_revision = options.default_v8_revision
        else:
            self._v8_revision = slave_utils.GetHashOrRevision(self._v8_dir)
        self.last_change_file = os.path.join(self._staging_dir, 'LAST_CHANGE')
        # The REVISIONS file will record the revisions information of the main
        # components Chromium/WebKit/V8.
        self.revisions_path = os.path.join(self._staging_dir, 'REVISIONS')
        self._build_revision = build_revision
        # Will be initialized in GetLastBuildRevision.
        self.last_chromium_revision = None
        self.last_webkit_revision = None
        self.last_v8_revision = None

        self._files_file = os.path.join(self._tool_dir,
                                        archive_utils.FILES_FILENAME)
        self._test_files = self.BuildOldFilesList(TEST_FILE_NAME)

        self._dual_upload = options.factory_properties.get(
            'dual_upload', False)
        self._archive_files = None
Example #10
def archive(options, args):
    # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
    build_dir = build_directory.GetBuildOutputDirectory()
    src_dir = os.path.abspath(os.path.dirname(build_dir))
    build_dir = os.path.join(build_dir, options.target)

    revision_dir = options.factory_properties.get('revision_dir')
    primary_project = chromium_utils.GetPrimaryProject(options)

    build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
        options, primary_project)
    build_git_commit = GetGitCommit(options, primary_project)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                   '')
    pieces = [chromium_utils.PlatformName(), options.target.lower()]
    if subdir_suffix:
        pieces.append(subdir_suffix)
    subdir = '-'.join(pieces)

    # Components like v8 get a <name>-v8-component-<revision> infix.
    component = ''
    if revision_dir:
        component = '-%s-component' % revision_dir

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
        build_sortkey_branch, build_sortkey_value)
    zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                       options.target.lower(), component,
                                       sortkey_path)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)

    gs_metadata = {
        GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
    }
    if build_sortkey_branch:
        gs_metadata[
            GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
                build_sortkey_branch, build_sortkey_value)
    if build_git_commit:
        gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl,
                                        metadata=gs_metadata)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #11
def archive_layout(options, args):
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(filename)s:%(lineno)-3d'
                             ' %(levelname)s %(message)s',
                      datefmt='%y%m%d %H:%M:%S')
  chrome_dir = os.path.abspath(options.build_dir)
  results_dir_basename = os.path.basename(options.results_dir)
  if options.results_dir is not None:
    options.results_dir = os.path.abspath(os.path.join(options.build_dir,
                                                       options.results_dir))
  else:
    options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
  print 'Archiving results from %s' % options.results_dir
  staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir)
  print 'Staging in %s' % staging_dir
  if not os.path.exists(staging_dir):
    os.makedirs(staging_dir)

  (actual_file_list, diff_file_list) = _CollectArchiveFiles(options.results_dir)
  zip_file = chromium_utils.MakeZip(staging_dir,
                                    results_dir_basename,
                                    actual_file_list,
                                    options.results_dir)[1]
  # TODO(ojan): Stop separately uploading full_results.json once garden-o-matic
  # switches to using failing_results.json.
  full_results_json = os.path.join(options.results_dir, 'full_results.json')
  failing_results_json = os.path.join(options.results_dir,
      'failing_results.json')

  # Extract the build name of this slave (e.g., 'chrome-release') from its
  # configuration file if not provided as a param.
  build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
  build_name = re.sub('[ .()]', '_', build_name)

  wc_dir = os.path.dirname(chrome_dir)
  last_change = slave_utils.GetHashOrRevision(wc_dir)

  # TODO(dpranke): Is it safe to assume build_number is not blank? Should we
  # assert() this ?
  build_number = str(options.build_number)
  print 'last change: %s' % last_change
  print 'build name: %s' % build_name
  print 'build number: %s' % build_number
  print 'host name: %s' % socket.gethostname()

  if options.gs_bucket:
    # Create a file containing last_change revision. This file will be uploaded
    # after all layout test results are uploaded so the client can check this
    # file to see if the upload for the revision is complete.
    # See crbug.com/574272 for more details.
    last_change_file = os.path.join(staging_dir, 'LAST_CHANGE')
    with open(last_change_file, 'w') as f:
      f.write(last_change)

    # Copy the results to a directory archived by build number.
    gs_base = '/'.join([options.gs_bucket, build_name, build_number])
    gs_acl = options.gs_acl
    # These files never change, cache for a year.
    cache_control = "public, max-age=31556926"
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    # TODO(dpranke): Remove these two lines once clients are fetching the
    # files from the layout-test-results dir.
    slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
      gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
      cache_control=cache_control)

    # And also to the 'results' directory to provide the 'latest' results
    # and make sure they are not cached at all (Cloud Storage defaults to
    # caching w/ a max-age=3600).
    gs_base = '/'.join([options.gs_bucket, build_name, 'results'])
    cache_control = 'no-cache'
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
        gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
        cache_control=cache_control)

  else:
    # Where to save layout test results.
    dest_parent_dir = os.path.join(archive_utils.Config.www_dir_base,
        results_dir_basename.replace('-', '_'), build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    _MaybeMakeDirectoryOnArchiveHost(dest_dir)
    _CopyFileToArchiveHost(zip_file, dest_dir)
    _CopyFileToArchiveHost(full_results_json, dest_dir)
    _CopyFileToArchiveHost(failing_results_json, dest_dir)
    # Not supported on Google Storage yet.
    _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir, diff_file_list,
                                  options)
  return 0
Example #12
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(
        options.src_dir, options.cros_board)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = (options.staging_dir
                   or slave_utils.GetStagingDir(options.src_dir))
    if not os.path.exists(staging_dir):
        os.makedirs(staging_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.master_name,
        options.build_number,
        options.parent_build_number,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not options.append_deps_patch_sha))

    # TODO(robertocn): Remove this if no one other than bisect uses it.
    if options.append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)
    path_filter = PathMatcher(options)

    # Expand one level deep so that secondary toolchains can be filtered.
    for i in xrange(len(root_files) - 1, -1, -1):
        path = root_files[i]
        # Don't expand directories that will be filtered out.
        if not path_filter.Match(path):
            continue
        abs_path = os.path.join(build_dir, path)
        if os.path.isdir(abs_path):
            root_files[i:i + 1] = [
                os.path.join(path, f) for f in os.listdir(abs_path)
            ]

    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Switch layout tests to use files from 'gen/layout_test_data'
    # and remove this.
    mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
    print 'Include mojom files: %s' % mojom_files
    zip_file_list.extend(mojom_files)

    layout_test_data_files = _LayoutTestFiles(build_dir)
    print 'Include layout test data: %s' % layout_test_data_files
    zip_file_list.extend(layout_test_data_files)

    zip_file = MakeUnversionedArchive(build_dir,
                                      staging_dir,
                                      zip_file_list,
                                      unversioned_base_name,
                                      strip_files=options.strip_files)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = 10
    if options.build_url.startswith('gs://'):
        # Don't keep builds lying around when uploading them to google storage.
        prune_limit = 3
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    urls = {}
    if options.build_url.startswith('gs://'):
        zip_url = UploadToGoogleStorage(versioned_file, revision_file,
                                        options.build_url, options.gs_acl,
                                        options.gsutil_py_path)

        storage_url = ('https://storage.googleapis.com/%s/%s' %
                       (options.build_url[len('gs://'):],
                        os.path.basename(versioned_file)))
        urls['storage_url'] = storage_url
    else:
        staging_path = (os.path.splitdrive(versioned_file)[1].replace(
            os.path.sep, '/'))
        zip_url = 'http://' + options.slave_name + staging_path

    urls['zip_url'] = zip_url

    return urls
Example #13
    def __init__(self, options, build_revision):
        """Sets a number of file and directory paths for convenient use."""

        self.options = options
        self._src_dir = os.path.abspath(options.src_dir)
        self._chrome_dir = os.path.join(self._src_dir, 'chrome')
        # TODO: This code should not be grabbing so deeply into WebKit.
        #       Worse, this code ends up looking at top-of-tree WebKit
        #       instead of the revision in DEPS.
        self._webkit_dir = os.path.join(self._src_dir, 'third_party', 'WebKit',
                                        'Source', 'WebCore')
        self._v8_dir = os.path.join(self._src_dir, 'v8')
        # TODO: need to get the build *output* directory passed in instead so Linux
        # and Mac don't have to walk up a directory to get to the right directory.
        if chromium_utils.IsWindows():
            self._build_dir = os.path.join(options.build_dir, options.target)
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'win')
        elif chromium_utils.IsLinux():
            self._build_dir = os.path.join(os.path.dirname(options.build_dir),
                                           'out', options.target)
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'linux')
        elif chromium_utils.IsMac():
            self._build_dir = os.path.join(os.path.dirname(options.build_dir),
                                           'xcodebuild', options.target)
            self._tool_dir = os.path.join(self._chrome_dir, 'tools', 'build',
                                          'mac')
        else:
            raise NotImplementedError(
                'Platform "%s" is not currently supported.' % sys.platform)
        self._staging_dir = slave_utils.GetStagingDir(self._src_dir)

        self._symbol_dir_base = options.dirs['symbol_dir_base']
        self._www_dir_base = options.dirs['www_dir_base']
        self._build_name = slave_utils.SlaveBuildName(self._src_dir)
        self._symbol_dir_base = os.path.join(self._symbol_dir_base,
                                             self._build_name)
        self._www_dir_base = os.path.join(self._www_dir_base, self._build_name)

        self._version_file = os.path.join(self._chrome_dir, 'VERSION')

        if options.default_chromium_revision:
            self._chromium_revision = options.default_chromium_revision
        else:
            self._chromium_revision = slave_utils.SubversionRevision(
                self._chrome_dir)
        if options.default_webkit_revision:
            self._webkit_revision = options.default_webkit_revision
        else:
            self._webkit_revision = slave_utils.SubversionRevision(
                self._webkit_dir)
        if options.default_v8_revision:
            self._v8_revision = options.default_v8_revision
        else:
            self._v8_revision = slave_utils.SubversionRevision(self._v8_dir)
        self.last_change_file = os.path.join(self._staging_dir, 'LAST_CHANGE')
        # The REVISIONS file will record the revisions information of the main
        # components Chromium/WebKit/V8.
        self.revisions_path = os.path.join(self._staging_dir, 'REVISIONS')
        self._build_revision = build_revision
        # Will be initialized in GetLastBuildRevision.
        self.last_chromium_revision = None
        self.last_webkit_revision = None
        self.last_v8_revision = None

        self._files_file = os.path.join(self._tool_dir,
                                        archive_utils.FILES_FILENAME)
        self._test_files = self.BuildOldFilesList(TEST_FILE_NAME)

        self._dual_upload = options.factory_properties.get(
            'dual_upload', False)
        self._archive_files = None
Example #14
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix,
                                     chromium_utils.PlatformName(),
                                     options.target.lower(),
                                     component,
                                     build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
Example #15
def Archive(options):
    build_dir = build_directory.GetBuildOutputDirectory(options.src_dir)
    build_dir = os.path.abspath(os.path.join(build_dir, options.target))

    staging_dir = slave_utils.GetStagingDir(options.src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            options.src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision

    append_deps_patch_sha = options.factory_properties.get(
        'append_deps_patch_sha')

    unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
        options.build_properties,
        build_revision,
        webkit_revision,
        use_try_buildnumber=(not append_deps_patch_sha))

    if append_deps_patch_sha:
        deps_sha = os.path.join('src', 'DEPS.sha')
        if os.path.exists(deps_sha):
            sha = open(deps_sha).read()
            version_suffix = '%s_%s' % (version_suffix, sha.strip())
            print 'Appending sha of the patch: %s' % sha
        else:
            print 'DEPS.sha file not found, not appending sha.'

    print 'Full Staging in %s' % staging_dir
    print 'Build Directory %s' % build_dir

    # Include the revision file in tarballs
    WriteRevisionFile(build_dir, build_revision)

    # Copy the crt files if necessary.
    if options.target == 'Debug' and chromium_utils.IsWindows():
        CopyDebugCRT(build_dir)

    # Build the list of files to archive.
    root_files = os.listdir(build_dir)

    # Remove initial\chrome.ilk. The filtering is only done on toplevel files,
    # and we can't exclude everything in initial since initial\chrome.dll.pdb is
    # needed in the archive. (And we can't delete it on disk because that would
    # slow down the next incremental build).
    if 'initial' in root_files:
        # Expand 'initial' directory by its contents, so that initial\chrome.ilk
        # will be filtered out by the blacklist.
        index = root_files.index('initial')
        root_files[index:index + 1] = [
            os.path.join('initial', f)
            for f in os.listdir(os.path.join(build_dir, 'initial'))
        ]

    path_filter = PathMatcher(options)
    print path_filter
    print('\nActually excluded: %s' %
          [f for f in root_files if not path_filter.Match(f)])

    zip_file_list = [f for f in root_files if path_filter.Match(f)]

    # TODO(yzshen): Once we have swarming support ready, we could use it to
    # archive run time dependencies of tests and remove this step.
    mojom_js_files = MojomJSFiles(build_dir)
    print 'Include mojom JavaScript files: %s' % mojom_js_files
    zip_file_list.extend(mojom_js_files)

    zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                                      unversioned_base_name,
                                      options.path_filter)

    zip_base, zip_ext, versioned_file = MakeVersionedArchive(
        zip_file, version_suffix, options)

    prune_limit = max(0, int(options.factory_properties.get('prune_limit',
                                                            10)))
    PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)

    # Update the latest revision file in the staging directory
    # to allow testers to figure out the latest packaged revision
    # without downloading tarballs.
    revision_file = WriteRevisionFile(staging_dir, build_revision)

    build_url = (options.build_url
                 or options.factory_properties.get('build_url', ''))
    if build_url.startswith('gs://'):
        gs_acl = options.factory_properties.get('gs_acl')
        UploadToGoogleStorage(versioned_file, revision_file, build_url, gs_acl)

    return 0
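
Most of the examples above share one archive flow: resolve the build output directory, get a staging directory via slave_utils.GetStagingDir(), zip the selected build outputs there with chromium_utils.MakeZip(), and upload the result (typically with slave_utils.GSUtilCopyFile()). The sketch below condenses that flow for reference; it reuses only calls that appear in the examples, while the function name archive_build, the should_package filter argument, and the import paths are illustrative assumptions rather than part of any one script.

# Minimal sketch of the shared staging/zipping/upload pattern (Python 2,
# matching the examples). Import paths are assumed to follow the
# build/scripts layout these snippets come from; should_package stands in
# for a script-specific file filter such as ShouldPackageFile().
import os
import stat

from common import chromium_utils
from slave import slave_utils


def archive_build(src_dir, build_dir, gs_bucket, zip_base_name,
                  should_package=lambda name: True, gs_acl=None):
    staging_dir = slave_utils.GetStagingDir(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    # Select the top-level build outputs to package.
    zip_file_list = [f for f in os.listdir(build_dir) if should_package(f)]

    # MakeZip returns (zip_dir, zip_file); only the zip file is needed here.
    zip_dir, zip_file = chromium_utils.MakeZip(staging_dir, zip_base_name,
                                               zip_file_list, build_dir,
                                               raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    chromium_utils.MakeWorldReadable(zip_file)
    print 'Zip file is %d bytes' % os.stat(zip_file)[stat.ST_SIZE]

    # GSUtilCopyFile returns a non-zero status on failure, as in the
    # examples above; on success the local zip is no longer needed.
    status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
    if not status:
        os.remove(zip_file)
    return status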