Example #1
def main():
  work_dir = os.path.abspath('.')

  print 'Locating NaCl SDK update script at %s' % NACL_SDK_UPDATE_URL
  file_name = NACL_SDK_UPDATE_URL.split('/')[-1]
  response = requests.get(NACL_SDK_UPDATE_URL, verify=True, stream=True)

  print 'Downloading: %s' % file_name
  Retrieve(response, file_name)

  print 'Unzipping %s into %s' % (file_name, work_dir)
  chromium_utils.ExtractZip(file_name, work_dir, verbose=True)

  result = chromium_utils.RunCommand([NACL_TOOL, 'update', '--force'])

  if os.path.exists(CURRENT_PEPPER_BUNDLE):
    print 'Removing current pepper bundle %s' % CURRENT_PEPPER_BUNDLE
    shutil.rmtree(CURRENT_PEPPER_BUNDLE)

  def PepperDirs():
    for x in os.listdir('nacl_sdk'):
      if re.match(r'pepper_\d+', x):
        yield x

  pepper_rev = max([int(i.split('_')[1]) for i in PepperDirs()])
  pepper_rev_dir = os.path.join('nacl_sdk', 'pepper_' + str(pepper_rev))

  print 'Copying pepper bundle %d to current' % pepper_rev
  shutil.copytree(pepper_rev_dir, CURRENT_PEPPER_BUNDLE, symlinks=True)

  return result
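
The Retrieve helper these examples call is defined elsewhere. A minimal sketch, assuming it simply streams the requests response to disk in chunks (iter_content is the standard requests API for that):

def Retrieve(response, file_name):
  # Stream the response body to disk in 1 MB chunks so a large SDK
  # archive never has to fit in memory.
  with open(file_name, 'wb') as f:
    for chunk in response.iter_content(chunk_size=1 << 20):
      if chunk:  # Skip keep-alive chunks.
        f.write(chunk)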
Example #2
def main():
    option_parser = optparse.OptionParser()
    option_parser.add_option('--pepper-channel',
                             default='stable',
                             help='Pepper channel (stable|beta|canary)')
    options, _ = option_parser.parse_args()

    work_dir = os.path.abspath('.')

    print 'Locating NaCl SDK update script at %s' % NACL_SDK_UPDATE_URL
    file_name = NACL_SDK_UPDATE_URL.split('/')[-1]
    response = requests.get(NACL_SDK_UPDATE_URL, verify=True, stream=True)

    file_hash = None
    if os.path.exists(file_name):
        file_hash = GetFileHash(file_name)

    print 'Downloading: %s' % file_name
    Retrieve(response, file_name)

    # Only extract if file changed. Extraction overwrites the sdk tools and the
    # state about which pepper revisions are up to date.
    if file_hash != GetFileHash(file_name):
        print 'Unzipping %s into %s' % (file_name, work_dir)
        chromium_utils.ExtractZip(file_name, work_dir, verbose=True)
    else:
        print 'Existing %s is up to date.' % file_name

    print 'Listing available pepper bundles:'
    output = chromium_utils.GetCommandOutput([NACL_TOOL, 'list'])
    print output
    pepper_rev = GetRevisionName(output, options.pepper_channel)

    print 'Updating pepper bundle %s' % pepper_rev
    cmd = [NACL_TOOL, 'update', pepper_rev, '--force']
    result = chromium_utils.RunCommand(cmd)

    if os.path.exists(CURRENT_PEPPER_BUNDLE):
        print 'Removing current pepper bundle %s' % CURRENT_PEPPER_BUNDLE
        shutil.rmtree(CURRENT_PEPPER_BUNDLE)

    pepper_rev_dir = os.path.join('nacl_sdk', pepper_rev)

    print 'Copying pepper bundle %s to current' % pepper_rev
    shutil.copytree(pepper_rev_dir, CURRENT_PEPPER_BUNDLE, symlinks=True)

    return result
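
GetFileHash is likewise defined outside the snippet. Since its result is only ever compared against a previous hash of the same file, any stable digest works; a minimal sketch using hashlib:

import hashlib

def GetFileHash(file_name):
    # Digest the file in fixed-size blocks to keep memory use flat.
    digest = hashlib.sha1()
    with open(file_name, 'rb') as f:
        for block in iter(lambda: f.read(1 << 20), b''):
            digest.update(block)
    return digest.hexdigest()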
Example #3
def unpack(options):
    if not options.url and not options.builder:
        raise StagingError('Either a test url or builder name is required.')

    # Remove existing test archive.
    if os.path.exists(AUTO_PATH):
        print 'Removing existing directory at', AUTO_PATH
        shutil.rmtree(AUTO_PATH)

    os.mkdir(AUTO_PATH)

    # Download archive.
    if options.url:
        url = options.url
    else:
        # Find latest archive.
        branch = 'o3d'
        latest_path = branch + '/latest_' + options.builder
        latest_url = ARCHIVE_BASE_URL + latest_path

        local_latest = os.path.join(AUTO_PATH, 'latest')
        print 'Downloading latest file from', latest_url
        urllib.urlretrieve(latest_url, local_latest)

        with open(local_latest, 'r') as latest_file:
            # Strip the trailing newline so the resulting URL is usable.
            url = (ARCHIVE_BASE_URL + branch + '/' +
                   latest_file.readline().strip())
    try:
        local_archive_path = DownloadTestArchive(url)
    except IOError:
        print 'IOError while downloading test archive from', url
        return 2

    # Unzip archive.
    output_dir = os.path.normpath(os.path.join(O3D_PATH, '..'))
    print 'Extracting test archive to', output_dir
    utils.ExtractZip(local_archive_path, output_dir, False)

    # Copy archive's automation scripts into auto directory.
    print 'Copying automation scripts from', O3D_SRC_AUTO_PATH, 'to', SCRIPTS_PATH
    shutil.copytree(O3D_SRC_AUTO_PATH, SCRIPTS_PATH)
    return 0
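
DownloadTestArchive is not shown. Given that the caller catches IOError, a plausible minimal version, assuming it uses urllib.urlretrieve (which raises IOError on failure) and stores the archive under AUTO_PATH:

def DownloadTestArchive(url):
    # Fetch the archive next to the automation files; urlretrieve raises
    # IOError on network failure, which unpack() handles.
    local_path = os.path.join(AUTO_PATH, os.path.basename(url))
    print 'Downloading test archive from', url
    urllib.urlretrieve(url, local_path)
    return local_path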
Example #4
def _ArchiveFullLayoutTestResults(staging_dir, dest_dir, diff_file_list,
    options):
  # Copy the actual and diff files to the web server.
  # Don't clobber the staging_dir in the MakeZip call so that it keeps the
  # files from the previous MakeZip call on diff_file_list.
  print "archiving results + diffs"
  full_zip_file = chromium_utils.MakeZip(staging_dir,
      'layout-test-results', diff_file_list, options.results_dir,
      remove_archive_directory=False)[1]
  slave_utils.CopyFileToArchiveHost(full_zip_file, dest_dir)

  # Extract the files on the web server.
  extract_dir = os.path.join(dest_dir, 'results')
  print 'extracting zip file to %s' % extract_dir

  if chromium_utils.IsWindows():
    chromium_utils.ExtractZip(full_zip_file, extract_dir)
  elif chromium_utils.IsLinux() or chromium_utils.IsMac():
    remote_zip_file = os.path.join(dest_dir, os.path.basename(full_zip_file))
    chromium_utils.SshExtractZip(config.Archive.archive_host, remote_zip_file,
                                 extract_dir)
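
On Linux/Mac the zip has already been copied to the archive host, so chromium_utils.SshExtractZip only needs to unpack it remotely. A sketch of that idea (an assumption, not the actual chromium_utils implementation):

import subprocess

def SshExtractZip(host, zip_file, extract_dir):
  # Unzip on the remote archive host: -o overwrites without prompting,
  # -d selects the destination directory.
  subprocess.check_call(
      ['ssh', host, 'unzip', '-o', '-d', extract_dir, zip_file])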
Example #5
def real_main(options):
    """ Download a build, extract it to build\BuildDir\full-build-win32
      and rename it to build\BuildDir\Target
  """
    abs_build_dir = os.path.abspath(
        build_directory.GetBuildOutputDirectory(options.src_dir))
    target_build_output_dir = os.path.join(abs_build_dir, options.target)

    # Generic name for the archive.
    archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

    # Just take the zip off the name for the output directory name.
    output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

    src_dir = os.path.dirname(abs_build_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision
    base_url, url = GetBuildUrl(options, build_revision, webkit_revision)
    archive_name = os.path.basename(base_url)

    if url.startswith('gs://'):
        handler = GSHandler(url=url, archive_name=archive_name)
    else:
        handler = WebHandler(url=url, archive_name=archive_name)

    # We try to download and extract 3 times.
    for tries in range(1, 4):
        print 'Try %d: Fetching build from %s...' % (tries, url)

        failure = False

        # Check if the url exists.
        if not handler.is_present():
            print '%s is not found' % url
            failure = True

            # When 'halt_on_missing_build' is present in factory_properties and if
            # 'revision' is set in build properties, we assume the build is
            # triggered automatically and so we halt on a missing build zip.  The
            # other case is if the build is forced, in which case we keep trying
            # later by looking for the latest build that's available.
            if (options.factory_properties.get('halt_on_missing_build', False)
                    and 'revision' in options.build_properties
                    and options.build_properties['revision'] != ''):
                return slave_utils.ERROR_EXIT_CODE

        # If the url is valid, we download the file.
        if not failure:
            if not handler.download():
                failure = True

        # If the versioned url failed, we try to get the latest build.
        if failure:
            if url.startswith('gs://'):
                continue
            else:
                print 'Fetching latest build at %s' % base_url
                handler.url = base_url
                if not handler.download():
                    continue

        print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
        try:
            chromium_utils.RemoveDirectory(target_build_output_dir)
            chromium_utils.ExtractZip(archive_name, abs_build_dir)
            # For Chrome builds, the build will be stored in chrome-win32.
            if 'full-build-win32' in output_dir:
                chrome_dir = output_dir.replace('full-build-win32',
                                                'chrome-win32')
                if os.path.exists(chrome_dir):
                    output_dir = chrome_dir

            print 'Moving build from %s to %s' % (output_dir,
                                                  target_build_output_dir)
            shutil.move(output_dir, target_build_output_dir)
        except (OSError, IOError, chromium_utils.ExternalError):
            print 'Failed to extract the build.'
            # Print out the traceback in a nice format
            traceback.print_exc()
            # Try again...
            continue

        # If we got the latest build, then figure out its revision number.
        if failure:
            print "Trying to determine the latest build's revision number..."
            try:
                build_revision_file_name = os.path.join(
                    target_build_output_dir,
                    chromium_utils.FULL_BUILD_REVISION_FILENAME)
                build_revision_file = open(build_revision_file_name, 'r')
                latest_revision = build_revision_file.read()
                build_revision_file.close()
                print 'Latest build is revision: %s' % latest_revision
            except IOError:
                print "Could not determine the latest build's revision number"

        if failure:
            # We successfully extracted the archive, but it was the generic one.
            return slave_utils.WARNING_EXIT_CODE
        return 0

    # If we get here, that means that it failed 3 times. We return a failure.
    return slave_utils.ERROR_EXIT_CODE
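
GSHandler and WebHandler share a small interface: is_present() probes the URL and download() fetches it to archive_name. A hypothetical WebHandler along those lines (the GS variant would shell out to gsutil instead):

import urllib
import urllib2

class WebHandler(object):
    """Hypothetical HTTP handler matching the interface used above."""

    def __init__(self, url, archive_name):
        self.url = url
        self.archive_name = archive_name

    def is_present(self):
        # Probe the URL without keeping the body around.
        try:
            urllib2.urlopen(self.url).close()
            return True
        except (urllib2.URLError, IOError):
            return False

    def download(self):
        try:
            urllib.urlretrieve(self.url, self.archive_name)
            return True
        except IOError:
            return False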
Example #6
def real_main(options):
  """ Download a build, extract it to build\\BuildDir\\full-build-win32
      and rename it to build\\BuildDir\\Target
  """
  abs_build_dir = os.path.abspath(
      build_directory.GetBuildOutputDirectory(options.src_dir))
  target_build_output_dir = os.path.join(abs_build_dir, options.target)

  # Generic name for the archive.
  archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

  # Just take the zip off the name for the output directory name.
  output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

  src_dir = os.path.dirname(abs_build_dir)
  if not options.build_revision and not options.build_archive_url:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision
  url, archive_name = GetBuildUrl(options, build_revision, webkit_revision)
  if archive_name is None:
    archive_name = 'build.zip'
    base_url = None
  else:
    base_url = '/'.join(url.split('/')[:-1] + [archive_name])

  if url.startswith('gs://'):
    handler = GSHandler(url=url, archive_name=archive_name)
  else:
    handler = WebHandler(url=url, archive_name=archive_name)

  # We try to download and extract 3 times.
  for tries in range(1, 4):
    print 'Try %d: Fetching build from %s...' % (tries, url)

    failure = False

    # If the url is valid, we download the file.
    if not failure:
      if not handler.download():
        if options.halt_on_missing_build:
          return slave_utils.ERROR_EXIT_CODE
        failure = True

    # If the versioned url failed, we try to get the latest build.
    if failure:
      if url.startswith('gs://') or not base_url:
        continue
      else:
        print 'Fetching latest build at %s' % base_url
        base_handler = handler.__class__(base_url, handler.archive_name)
        if not base_handler.download():
          continue

    print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
    try:
      chromium_utils.RemoveDirectory(target_build_output_dir)
      chromium_utils.ExtractZip(archive_name, abs_build_dir)
      # For Chrome builds, the build will be stored in chrome-win32.
      if 'full-build-win32' in output_dir:
        chrome_dir = output_dir.replace('full-build-win32', 'chrome-win32')
        if os.path.exists(chrome_dir):
          output_dir = chrome_dir

      print 'Moving build from %s to %s' % (output_dir, target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
    except (OSError, IOError, chromium_utils.ExternalError):
      print 'Failed to extract the build.'
      # Print out the traceback in a nice format
      traceback.print_exc()
      # Try again...
      continue

    # If we got the latest build, then figure out its revision number.
    if failure:
      print "Trying to determine the latest build's revision number..."
      try:
        build_revision_file_name = os.path.join(
            target_build_output_dir,
            chromium_utils.FULL_BUILD_REVISION_FILENAME)
        build_revision_file = open(build_revision_file_name, 'r')
        print 'Latest build is revision: %s' % build_revision_file.read()
        build_revision_file.close()
      except IOError:
        print "Could not determine the latest build's revision number"

    if failure:
      # We successfully extracted the archive, but it was the generic one.
      return slave_utils.WARNING_EXIT_CODE
    return 0

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
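
The base_url built in this variant swaps the versioned file name at the end of the URL for the generic archive_name, so the fallback fetch targets the latest build. With hypothetical values:

url = 'https://build.example.com/win/full-build-win32_223344.zip'
archive_name = 'full-build-win32.zip'
base_url = '/'.join(url.split('/')[:-1] + [archive_name])
# base_url == 'https://build.example.com/win/full-build-win32.zip'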
Example #7
def real_main(options):
    """Download a build and extract.

  Extract to build\\BuildDir\\full-build-win32 and rename it to
  build\\BuildDir\\Target
  """
    target_build_output_dir = os.path.join(options.build_dir, options.target)
    platform = chromium_utils.PlatformName()

    revision = options.revision
    if not revision:
        revision = GetLatestRevision(options.build_url, platform)
        if not revision:
            print 'Failed to get revision number.'
            return slave_utils.ERROR_EXIT_CODE

    archive_url = GetBuildUrl(options.build_url, platform, revision)
    archive_name = ('dynamorio.' +
                    os.path.basename(archive_url).split('.', 1)[1])

    temp_dir = tempfile.mkdtemp()
    try:
        # We try to download and extract 3 times.
        for tries in range(1, 4):
            print 'Try %d: Fetching build from %s' % (tries, archive_url)

            failure = False
            try:
                print '%s/%s' % (archive_url, archive_name)
                urllib.urlretrieve(archive_url, archive_name)
                print '\nDownload complete'
            except IOError:
                print '\nFailed to download build'
                failure = True
                if options.halt_on_missing_build:
                    return slave_utils.ERROR_EXIT_CODE
            if failure:
                continue

            print 'Extracting build %s to %s...' % (archive_name,
                                                    options.build_dir)
            try:
                chromium_utils.RemoveDirectory(target_build_output_dir)
                chromium_utils.ExtractZip(archive_name, temp_dir)

                # Look for the top level directory from extracted build.
                entries = os.listdir(temp_dir)
                output_dir = temp_dir
                if (len(entries) == 1 and os.path.isdir(
                        os.path.join(output_dir, entries[0]))):
                    output_dir = os.path.join(output_dir, entries[0])

                print 'Moving build from %s to %s' % (output_dir,
                                                      target_build_output_dir)
                shutil.move(output_dir, target_build_output_dir)
            except (OSError, IOError, chromium_utils.ExternalError):
                print 'Failed to extract the build.'
                # Print out the traceback in a nice format
                traceback.print_exc()
                # Try again...
                time.sleep(3)
                continue
            return 0
    finally:
        chromium_utils.RemoveDirectory(temp_dir)

    # If we get here, that means that it failed 3 times. We return a failure.
    return slave_utils.ERROR_EXIT_CODE
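
GetLatestRevision is not shown. A plausible minimal version, assuming the build archive publishes a per-platform LATEST marker file (the URL layout here is an assumption):

import urllib2

def GetLatestRevision(build_url, platform):
    # Hypothetical layout: <build_url>/<platform>/LATEST holds the most
    # recent revision number; return None if it cannot be fetched.
    latest_url = '%s/%s/LATEST' % (build_url, platform)
    try:
        return urllib2.urlopen(latest_url).read().strip() or None
    except (urllib2.URLError, IOError):
        return None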