Example no. 1
def MakeUnversionedArchive(build_dir,
                           staging_dir,
                           zip_file_list,
                           zip_file_name,
                           strip_files=None):
    """Creates an unversioned full build archive.
  Returns the path of the created archive."""
    # Prevent zip_file_list from containing duplicates.
    zip_file_list = list(set(zip_file_list))
    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True,
                                                 strip_files=strip_files)

    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big and
    # can cause bot failures from timeouts during downloads to testers.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    return zip_file
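
All of the examples on this page follow the same staging pattern: build a file list, hand it to chromium_utils.MakeZip, check that the archive exists, make it world-readable, and print its size. As a point of reference, here is a minimal standard-library sketch of that pattern (placeholder arguments, not the chromium_utils implementation; directories are not recursed):

# Standalone sketch of the zip-stage-and-report pattern (standard library only).
# build_dir, staging_dir, file_list and zip_name are placeholder inputs.
import os
import stat
import zipfile

def make_archive_sketch(build_dir, staging_dir, file_list, zip_name):
    file_list = sorted(set(file_list))  # drop duplicates, as in Example no. 1
    zip_path = os.path.join(staging_dir, zip_name + '.zip')
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zip_archive:
        for rel_path in file_list:
            zip_archive.write(os.path.join(build_dir, rel_path), rel_path)
    if not os.path.exists(zip_path):
        raise IOError('Failed to make zip package %s' % zip_path)
    # Roughly what MakeWorldReadable() does: add group/other read bits.
    os.chmod(zip_path, os.stat(zip_path).st_mode | stat.S_IRGRP | stat.S_IROTH)
    print('Zip file is %d bytes' % os.stat(zip_path).st_size)
    return zip_path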
Example no. 2
def archive(options, args):
    build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
        options.build_dir, use_out=chromium_utils.IsLinux())
    build_dir = os.path.join(build_dir, options.target)
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir = None

    # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is not
    # ready with the server-side change.
    if chromium_utils.IsMac():
        subdir = '%s-%s' % (chromium_utils.PlatformName(),
                            options.target.lower())

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
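
slave_utils.GSUtilCopyFile is an internal wrapper around the gsutil command line and returns a non-zero status on failure. A hedged sketch of an equivalent upload calling gsutil directly (bucket, subdir and ACL values are placeholders; the wrapper's retry and boto-configuration handling is not reproduced):

# Sketch of a GSUtilCopyFile-style upload via the gsutil CLI.
# Returns the gsutil exit code (0 on success), mirroring the status check above.
import os
import subprocess

def gsutil_copy_file(local_path, gs_bucket, subdir=None, gs_acl=None):
    dest = 'gs://' + gs_bucket.replace('gs://', '')
    if subdir:
        dest += '/' + subdir
    dest += '/' + os.path.basename(local_path)
    cmd = ['gsutil', 'cp']
    if gs_acl:
        cmd += ['-a', gs_acl]  # canned ACL, e.g. 'public-read'
    cmd += [local_path, dest]
    return subprocess.call(cmd)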
Example no. 3
def CreateArchive(build_dir,
                  staging_dir,
                  files_list,
                  archive_name,
                  allow_missing=True):
    """Put files into an archive dir as well as a zip of said archive dir.

  This method takes the list of files to archive, then prunes non-existing
  files from that list.

  archive_name is the desired name for the output zip file. It is also used as
  the basis for the directory that the files are zipped into. For instance,
  'foo.zip' creates the file foo.zip with the hierarchy foo/*. 'some_archive'
  creates the file 'some_archive' with the hierarchy some_archive_unzipped/*
  (the directory name is different to prevent name conflicts when extracting to
  the directory containing 'some_archive').

  If files_list is empty or has no existing files, CreateArchive returns ('', '').
  Otherwise, this method returns the archive directory the files are
  copied to and the full path of the zip file in a tuple.
  """

    print 'Creating archive %s ...' % archive_name

    if allow_missing:
        # Filter out files that don't exist.
        filtered_file_list = [
            f.strip() for f in files_list
            if os.path.exists(os.path.join(build_dir, f.strip()))
        ]
    else:
        filtered_file_list = list(files_list)

    if not filtered_file_list:
        # We have no files to archive, don't create an empty zip file.
        print 'WARNING: No files to archive.'
        return ('', '')

    if archive_name.endswith('.zip'):
        archive_dirname = archive_name[:-4]
    else:
        archive_dirname = archive_name + '_unzipped'

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 archive_dirname,
                                                 filtered_file_list,
                                                 build_dir,
                                                 raise_error=not allow_missing)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)

    if os.path.basename(zip_file) != archive_name:
        orig_zip = zip_file
        zip_file = os.path.join(os.path.dirname(orig_zip), archive_name)
        print 'Renaming archive: "%s" -> "%s"' % (orig_zip, zip_file)
        chromium_utils.MoveFile(orig_zip, zip_file)
    return (zip_dir, zip_file)
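
The directory-naming rule spelled out in the docstring (strip a trailing '.zip', otherwise append '_unzipped') is small enough to isolate. An illustrative helper, not part of the original module:

# Illustrative helper for the directory-naming rule described in the docstring.
def archive_dirname_for(archive_name):
    if archive_name.endswith('.zip'):
        return archive_name[:-4]
    return archive_name + '_unzipped'

assert archive_dirname_for('foo.zip') == 'foo'
assert archive_dirname_for('some_archive') == 'some_archive_unzipped'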
Example no. 4
def archive_layout(options, args):
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(filename)s:%(lineno)-3d'
                        ' %(levelname)s %(message)s',
                        datefmt='%y%m%d %H:%M:%S')
    chrome_dir = os.path.abspath(options.build_dir)
    results_dir_basename = os.path.basename(options.results_dir)
    if options.results_dir is not None:
        options.results_dir = os.path.abspath(
            os.path.join(options.build_dir, options.results_dir))
    else:
        options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
    print 'Archiving results from %s' % options.results_dir
    staging_dir = slave_utils.GetStagingDir(chrome_dir)
    print 'Staging in %s' % staging_dir

    (actual_file_list,
     diff_file_list) = _CollectArchiveFiles(options.results_dir)
    zip_file = chromium_utils.MakeZip(staging_dir, results_dir_basename,
                                      actual_file_list, options.results_dir)[1]
    full_results_json = os.path.join(options.results_dir, 'full_results.json')

    # Extract the build name of this slave (e.g., 'chrome-release') from its
    # configuration file if not provided as a param.
    build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
    build_name = re.sub('[ .()]', '_', build_name)

    last_change = str(slave_utils.SubversionRevision(chrome_dir))
    print 'last change: %s' % last_change
    print 'build name: %s' % build_name
    print 'host name: %s' % socket.gethostname()

    # Where to save layout test results.
    dest_parent_dir = os.path.join(config.Archive.www_dir_base,
                                   results_dir_basename.replace('-', '_'),
                                   build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    if gs_bucket:
        gs_base = '/'.join([gs_bucket, build_name, last_change])
        gs_acl = options.factory_properties.get('gs_acl', None)
        slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl)
        slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl)
    else:
        slave_utils.MaybeMakeDirectoryOnArchiveHost(dest_dir)
        slave_utils.CopyFileToArchiveHost(zip_file, dest_dir)
        slave_utils.CopyFileToArchiveHost(full_results_json, dest_dir)
        # Not supported on Google Storage yet.
        _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir,
                                      diff_file_list, options)
    return 0
Example no. 5
def main(argv):
    with open(sys.argv[3], 'r') as f:
        zip_file_list = json.load(f)
    (zip_dir, zip_file) = chromium_utils.MakeZip(sys.argv[1],
                                                 sys.argv[2],
                                                 zip_file_list,
                                                 sys.argv[4],
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise Exception('Failed to make zip package %s' % zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size
Example no. 6
def archive(options, args):
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))
    build_dir = os.path.join(src_dir, 'out', options.target)
    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    prefix = options.factory_properties.get('asan_archive_name', 'asan')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example no. 7
def _ArchiveFullLayoutTestResults(staging_dir, dest_dir, diff_file_list,
    options):
  # Copy the actual and diff files to the web server.
  # Don't clobber the staging_dir in the MakeZip call so that it keeps the
  # files from the previous MakeZip call on diff_file_list.
  print "archiving results + diffs"
  full_zip_file = chromium_utils.MakeZip(staging_dir,
      'layout-test-results', diff_file_list, options.results_dir,
      remove_archive_directory=False)[1]
  slave_utils.CopyFileToArchiveHost(full_zip_file, dest_dir)

  # Extract the files on the web server.
  extract_dir = os.path.join(dest_dir, 'results')
  print 'extracting zip file to %s' % extract_dir

  if chromium_utils.IsWindows():
    chromium_utils.ExtractZip(full_zip_file, extract_dir)
  elif chromium_utils.IsLinux() or chromium_utils.IsMac():
    remote_zip_file = os.path.join(dest_dir, os.path.basename(full_zip_file))
    chromium_utils.SshExtractZip(config.Archive.archive_host, remote_zip_file,
                                 extract_dir)
Example no. 8
def archive(options, args):
    # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
    build_dir = build_directory.GetBuildOutputDirectory()
    src_dir = os.path.abspath(os.path.dirname(build_dir))
    build_dir = os.path.join(build_dir, options.target)

    revision_dir = options.factory_properties.get('revision_dir')
    primary_project = chromium_utils.GetPrimaryProject(options)

    build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
        options, primary_project)
    build_git_commit = GetGitCommit(options, primary_project)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                   '')
    pieces = [chromium_utils.PlatformName(), options.target.lower()]
    if subdir_suffix:
        pieces.append(subdir_suffix)
    subdir = '-'.join(pieces)

    # Components like v8 get a <name>-v8-component-<revision> infix.
    component = ''
    if revision_dir:
        component = '-%s-component' % revision_dir

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
        build_sortkey_branch, build_sortkey_value)
    zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                       options.target.lower(), component,
                                       sortkey_path)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)

    gs_metadata = {
        GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
    }
    if build_sortkey_branch:
        gs_metadata[
            GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
                build_sortkey_branch, build_sortkey_value)
    if build_git_commit:
        gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl,
                                        metadata=gs_metadata)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
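
The metadata dictionary passed to GSUtilCopyFile is attached to the uploaded object as custom metadata. With gsutil invoked directly, custom metadata is normally supplied through top-level -h headers; a hedged sketch (the key names below are placeholders, and the GS_*_KEY constants are defined elsewhere in the original script):

# Sketch: upload with custom object metadata via gsutil -h headers.
import subprocess

def gsutil_copy_with_metadata(local_path, gs_url, metadata):
    cmd = ['gsutil']
    for key, value in sorted(metadata.items()):
        cmd += ['-h', 'x-goog-meta-%s:%s' % (key, value)]
    cmd += ['cp', local_path, gs_url]
    return subprocess.call(cmd)

# Example (placeholder bucket path and key):
# gsutil_copy_with_metadata('archive.zip', 'gs://some-bucket/subdir/archive.zip',
#                           {'commit-position': 'refs/heads/main@{#12345}'})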
Example no. 9
def archive_layout(options, args):
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(filename)s:%(lineno)-3d'
                             ' %(levelname)s %(message)s',
                      datefmt='%y%m%d %H:%M:%S')
  chrome_dir = os.path.abspath(options.build_dir)
  results_dir_basename = os.path.basename(options.results_dir)
  if options.results_dir is not None:
    options.results_dir = os.path.abspath(os.path.join(options.build_dir,
                                                       options.results_dir))
  else:
    options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
  print 'Archiving results from %s' % options.results_dir
  staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir)
  print 'Staging in %s' % staging_dir
  if not os.path.exists(staging_dir):
    os.makedirs(staging_dir)

  (actual_file_list, diff_file_list) = _CollectArchiveFiles(options.results_dir)
  zip_file = chromium_utils.MakeZip(staging_dir,
                                    results_dir_basename,
                                    actual_file_list,
                                    options.results_dir)[1]
  # TODO(ojan): Stop separately uploading full_results.json once garden-o-matic
  # switches to using failing_results.json.
  full_results_json = os.path.join(options.results_dir, 'full_results.json')
  failing_results_json = os.path.join(options.results_dir,
      'failing_results.json')

  # Extract the build name of this slave (e.g., 'chrome-release') from its
  # configuration file if not provided as a param.
  build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
  build_name = re.sub('[ .()]', '_', build_name)

  wc_dir = os.path.dirname(chrome_dir)
  last_change = slave_utils.GetHashOrRevision(wc_dir)

  # TODO(dpranke): Is it safe to assume build_number is not blank? Should we
  # assert() this ?
  build_number = str(options.build_number)
  print 'last change: %s' % last_change
  print 'build name: %s' % build_name
  print 'build number: %s' % build_number
  print 'host name: %s' % socket.gethostname()

  if options.gs_bucket:
    # Create a file containing last_change revision. This file will be uploaded
    # after all layout test results are uploaded so the client can check this
    # file to see if the upload for the revision is complete.
    # See crbug.com/574272 for more details.
    last_change_file = os.path.join(staging_dir, 'LAST_CHANGE')
    with open(last_change_file, 'w') as f:
      f.write(last_change)

    # Copy the results to a directory archived by build number.
    gs_base = '/'.join([options.gs_bucket, build_name, build_number])
    gs_acl = options.gs_acl
    # These files never change, cache for a year.
    cache_control = "public, max-age=31556926"
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    # TODO(dpranke): Remove these two lines once clients are fetching the
    # files from the layout-test-results dir.
    slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
      gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
      cache_control=cache_control)

    # And also to the 'results' directory to provide the 'latest' results
    # and make sure they are not cached at all (Cloud Storage defaults to
    # caching w/ a max-age=3600).
    gs_base = '/'.join([options.gs_bucket, build_name, 'results'])
    cache_control = 'no-cache'
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
        gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
        cache_control=cache_control)

  else:
    # Where to save layout test results.
    dest_parent_dir = os.path.join(archive_utils.Config.www_dir_base,
        results_dir_basename.replace('-', '_'), build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    _MaybeMakeDirectoryOnArchiveHost(dest_dir)
    _CopyFileToArchiveHost(zip_file, dest_dir)
    _CopyFileToArchiveHost(full_results_json, dest_dir)
    _CopyFileToArchiveHost(failing_results_json, dest_dir)
    # Not supported on Google Storage yet.
    _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir, diff_file_list,
                                  options)
  return 0
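
The two upload passes above differ mainly in their Cache-Control header: the per-build copies are treated as immutable and cached for a year, while the 'results' (latest) copies are uploaded with no-cache so clients always refetch them. With gsutil invoked directly, that header is also set with -h; a hedged sketch with placeholder paths:

# Sketch: set Cache-Control on upload, as the two passes above do.
import subprocess

def gsutil_copy_with_cache_control(local_path, gs_url, cache_control):
    return subprocess.call(['gsutil', '-h', 'Cache-Control:%s' % cache_control,
                            'cp', local_path, gs_url])

# Immutable per-build copy vs. always-fresh 'latest' copy (placeholder paths):
# gsutil_copy_with_cache_control('results.zip', 'gs://bucket/builder/123/results.zip',
#                                'public, max-age=31556926')
# gsutil_copy_with_cache_control('results.zip', 'gs://bucket/builder/results/results.zip',
#                                'no-cache')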
Example no. 10
    build_revision_file_name = 'FULL_BUILD_REVISION'
    build_revision_path = os.path.join(build_dir, build_revision_file_name)
    try:
        build_revision_file = open(build_revision_path, 'w')
        build_revision_file.write('%d' % build_revision)
        build_revision_file.close()
        if chromium_utils.IsMac() or chromium_utils.IsLinux():
            os.chmod(build_revision_path, 0644)
        zip_file_list.append(build_revision_file_name)
    except IOError:
        print 'Writing to revision file %s failed ' % build_revision_path

    zip_file_name = 'full-build-%s' % chromium_utils.PlatformName()
    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    if chromium_utils.IsMac() or chromium_utils.IsLinux():
        os.chmod(zip_file, 0644)

    # Report the size of the zip file to help catch when it gets too big and
    # can cause bot failures from timeouts during downloads to testers.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    # Create a versioned copy of the file.
    versioned_file = zip_file.replace('.zip', '_%d.zip' % build_revision)
Example no. 11
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix,
                                     chromium_utils.PlatformName(),
                                     options.target.lower(),
                                     component,
                                     build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
Example no. 12
def main(argv):
    if len(argv) != 3:
        print 'Usage: prepare_selenium_tests.py <o3d_src_root> <destination>'
        print 'Exiting...'
        return 1

    # Make given directories absolute before changing the working directory.
    src_root = os.path.abspath(argv[1])
    o3d_dir = os.path.join(src_root, 'o3d')
    o3d_internal_dir = os.path.join(src_root, 'o3d-internal')
    destination = os.path.abspath(argv[2])
    config_dir = os.path.abspath(os.path.dirname(__file__))
    config_file = os.path.join(config_dir, ARCHIVE_CONFIG_NAME)

    print 'O3D source root:', src_root
    print 'Destination:', destination
    print 'Config file:', config_file

    # Change umask on linux so that outputs (latest file and zip) are readable.
    if utils.IsLinux():
        mask = os.umask(0022)

    # Build ChangeResolution project.
    BuildChangeResolution(src_root)

    # Create test archive.
    files = GetO3DArchiveFiles(src_root, config_file)
    zip_name = 'o3d'
    utils.MakeZip(destination, zip_name, files, src_root)
    zip_path = os.path.join(destination, zip_name + '.zip')
    print 'Zip archive created: %s' % zip_path

    # Find builder name and revision #s.
    builder_name = slave_utils.SlaveBuildName(o3d_dir)
    o3d_rev = str(slave_utils.SubversionRevision(o3d_dir))
    o3d_internal_rev = str(slave_utils.SubversionRevision(o3d_internal_dir))
    package_name = 'test_' + builder_name + '.zip'
    package_dir = o3d_rev + '_' + o3d_internal_rev
    package_path = package_dir + '/' + package_name

    print 'Builder name:', builder_name
    print 'O3D revision:', o3d_rev
    print 'O3D-internal revision:', o3d_internal_rev
    print 'Package path:', package_path

    # Create latest file.
    latest_path = os.path.join(destination, 'latest_' + builder_name)
    with open(latest_path, 'w') as latest_file:
        latest_file.write(package_path)

    # Upload files.
    package_dst = ('snapshots/o3d/test_packages/o3d/' + package_dir + '/' +
                   package_name)
    latest_dst = 'snapshots/o3d/test_packages/o3d/latest_' + builder_name

    UploadFile(zip_path, package_dst)
    UploadFile(latest_path, latest_dst)

    # Reset the umask on linux.
    if utils.IsLinux():
        os.umask(mask)

    return 0
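
The umask save/restore bracketing the body of main is easy to get wrong if an exception escapes before the reset. An illustrative alternative (not from the original script) scopes the change with a context manager so the old mask is always restored:

# Illustrative helper: restore the previous umask even if the block raises.
import contextlib
import os

@contextlib.contextmanager
def scoped_umask(new_mask):
    old_mask = os.umask(new_mask)
    try:
        yield
    finally:
        os.umask(old_mask)

# Usage sketch: outputs created inside the block are group/world readable.
# with scoped_umask(0o022):
#     ...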