Example #1
    def Patch(self):
        stamp_file = os.path.join(self.GetStampDir(), 'nacl_patch')
        src_dir = self.GetBuildLocation()
        if self.URL is None:
            return

        if os.path.exists(stamp_file):
            self.Log('Skipping patch step (cleaning source tree)')
            cmd = ['git', 'clean', '-f', '-d']
            if not util.log_level > util.LOG_INFO:
                cmd.append('-q')
            self.RunCmd(cmd)
            return

        util.LogHeading('Patching')
        InitGitRepo(src_dir)
        if os.path.exists(self.GetPatchFile()):
            LogVerbose('applying patch to: %s' % src_dir)
            cmd = ['patch', '-p1', '-g0', '--no-backup-if-mismatch']
            with open(self.GetPatchFile()) as f:
                self.RunCmd(cmd, stdin=f)
            self.RunCmd(['git', 'add', '.'])
            self.RunCmd(['git', 'commit', '-m', 'Apply naclports patch'])

        WriteStamp(stamp_file, '')
Example #2
def LoadCanned(parts):
  # Return an empty partition for the no-sharding case.
  if parts == 1:
    return [[]]
  partitions = []
  partition = []
  input_file = os.path.join(SCRIPT_DIR, 'partition%d.txt' % parts)
  LogVerbose("LoadCanned: %s" % input_file)
  with open(input_file) as fh:
    for line in fh:
      # Skip comments (tolerating blank lines, which would otherwise crash here).
      if not line.strip() or line.strip().startswith('#'):
        continue
      if line.startswith('  '):
        partition.append(line[2:].strip())
      else:
        if partition:
          partitions.append(partition)
          partition = []
  assert not partition
  assert len(partitions) == parts, partitions
  # Return a small set of packages for testing.
  if os.environ.get('TEST_BUILDBOT'):
    partitions[0] = [
        'glibc-compat',
        'nacl-spawn',
        'ncurses',
        'readline',
        'libtar',
        'zlib',
        'lua5.2',
        'lua-ppapi',
    ]
  return partitions
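
The partition files read above are plain text: lines whose first non-blank character is '#' are comments, lines indented by two spaces name the packages of the current shard, and any other line closes the current shard (so the file must end with a non-indented line, or the final assert fires). A hypothetical partition2.txt consistent with that parser might look like:

# canned partition for 2 bots (hypothetical example)
shard 0
  glibc-compat
  zlib
shard 1
  ncurses
  readline
end of partitions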
Example #3
def GetCanned(index, parts):
    assert index >= 0 and index < parts, [index, parts]
    partitions = LoadCanned(parts)
    partitions = FixupCanned(partitions)
    LogVerbose("Found %d packages for shard %d" %
               (len(partitions[index]), index))
    return partitions[index]
Example #4
def DownloadFiles(files, check_hashes=True, parallel=False):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = naclports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    file_info.name = os.path.join(download_dir, basename)
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or CheckHash(file_info.name, file_info.md5):
        Log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(file_info)

  def Check(file_info):
    if check_hashes and not CheckHash(file_info.name, file_info.md5):
      raise naclports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.name, file_info.md5, GetHash(file_info.name)))

  if not files_to_download:
    Log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    Log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), FormatSize(total_size)))

    gsutil = FindGsutil()
    if parallel:
      remaining_files = files_to_download
      num_files = 20
      while remaining_files:
        files = remaining_files[:num_files]
        remaining_files = remaining_files[num_files:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in files] + [download_dir]
        LogVerbose(cmd)
        subprocess.check_call(cmd)
        for file_info in files:
          Check(file_info)
    else:
      for file_info in files_to_download:
        naclports.DownloadFile(file_info.name, file_info.url)
        Check(file_info)

  return filenames
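
A minimal usage sketch for DownloadFiles; the listing variable is hypothetical, and in the surrounding tool the FileInfo objects come from parsing a gsutil listing, as in the next example:

# 'listing' is a hypothetical captured 'gsutil ls -le' output.
file_infos = ParseGsUtilLs(listing)
binaries = DownloadFiles(file_infos, check_hashes=True, parallel=False)
for filename, url in binaries:
  LogVerbose('%s <- %s' % (filename, url))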
Example #5
def main(args):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='naclports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cache the output of gsutil -le (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        naclports.SetVerbose(True)

    sdk_version = naclports.util.GetSDKVersion()
    Log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/packages' % (
        naclports.GS_BUCKET, sdk_version, args.revision)
    gs_url = 'gs://' + base_path
    gsutil = naclports.util.FindInPath('gsutil.py')
    listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'listing.txt')
    if args.cache_listing and os.path.exists(listing_file):
        Log('Using pre-cached gs listing: %s' % listing_file)
        with open(listing_file) as f:
            listing = f.read()
    else:
        Log('Searching for packages at: %s' % gs_url)
        cmd = [sys.executable, gsutil, 'ls', '-le', gs_url]
        LogVerbose('Running: %s' % str(cmd))
        try:
            listing = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            Log('Command failed: %s' % e)
            return 1

    all_files = ParseGsUtilLs(listing)
    if args.cache_listing and not os.path.exists(listing_file):
        with open(listing_file, 'w') as f:
            f.write(listing)

    Log('Found %d packages [%s]' %
        (len(all_files), FormatSize(sum(f.size for f in all_files))))

    binaries = DownloadFiles(all_files, not args.skip_md5)
    index_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
    Log('Generating %s' % index_file)
    naclports.package_index.WriteIndex(index_file, binaries)
    Log('Done')
    return 0
Example #6
def ExtractArchive(archive, destination):
    ext = os.path.splitext(archive)[1]
    if ext in ('.gz', '.tgz', '.bz2', '.xz'):
        cmd = ['tar', 'xf', archive, '-C', destination]
    elif ext in ('.zip', ):
        cmd = ['unzip', '-q', '-d', destination, archive]
    else:
        raise Error('unhandled extension: %s' % ext)
    LogVerbose(cmd)
    subprocess.check_call(cmd)
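
A short usage sketch, mirroring how Extract() later in this page drives this helper (the archive path is hypothetical):

# Unpack a downloaded tarball into a scratch directory (archive path is hypothetical).
tmp_dir = tempfile.mkdtemp(dir=paths.OUT_DIR)
try:
    ExtractArchive('/tmp/zlib-1.2.8.tar.gz', tmp_dir)
finally:
    util.RemoveTree(tmp_dir)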
Example #7
def main(args):
  parser = argparse.ArgumentParser()
  parser.add_argument('--check', action='store_true',
                      help='check canned partition information is up-to-date.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-t', '--print-canned', type=int,
                      help='Print the canned partition list and exit.')
  parser.add_argument('-b', '--bot-prefix', help='builder name prefix.',
                      default='linux-newlib-')
  parser.add_argument('-n', '--num-bots',
                      help='Number of builders on the waterfall to collect '
                      'data from or to print a canned partition for.',
                      type=int, default=3)
  parser.add_argument('-p', '--num-parts',
                      help='Number of parts to partition things into '
                      '(this will differ from --num-bots when changing the '
                      'number of shards).',
                      type=int, default=3)
  parser.add_argument('--build-number', help='Builder number to look at for '
                      'historical data on build times.', type=int, default=-1)
  options = parser.parse_args(args)
  naclports.SetVerbose(options.verbose)

  if options.check:
    for num_bots in xrange(1, 6):
      print('Checking partitioning with %d bot(s)' % (num_bots))
      # GetCanned will raise an Error if the canned partition information is
      # bad, which in turn will trigger a non-zero return from this script.
      GetCanned(0, num_bots)
    return

  if options.print_canned is not None:
    PrintCanned(options.print_canned, options.num_bots)
    return

  projects = Projects()
  for bot in range(options.num_bots):
    bot_name = '%s%d' % (options.bot_prefix, bot)
    LogVerbose('Attempting to add data from "%s"' % bot_name)
    projects.AddDataFromBuilder(bot_name, options.build_number)
  projects.PostProcessDeps()

  parts = Partition(projects, options.num_parts)
  for i, project_times in enumerate(parts):
    print('builder %d (total: %d)' % (i, project_times.total_time))
    project_names = project_times.TopologicallySortedProjectNames(projects)
    print('  %s' % '\n  '.join(project_names))

  times = list(sorted(part.total_time for part in parts))
  difference = 0
  for i in range(1, len(times)):
    difference += times[i] - times[i - 1]
  print('Difference between total time of builders: %d' % difference)
Example #8
def DownloadDataFromBuilder(builder, build):
  max_tries = 10

  for _ in xrange(max_tries):
    url = 'http://build.chromium.org/p/client.nacl.ports/json'
    url += '/builders/%s/builds/%d' % (builder, build)
    LogVerbose('Downloading %s' % url)
    f = urllib2.urlopen(url)
    try:
      data = json.loads(f.read())
      text = data['text']
      if text == ['build', 'successful']:
        LogVerbose('  Success!')
        return data
      LogVerbose('  Not successful, trying previous build.')
    finally:
      f.close()
    build -= 1

  raise Error('Unable to find a successful build:\nBuilder: %s\nRange: [%d, %d]'
      % (builder, build - max_tries, build))
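
A hedged usage sketch; the build number is hypothetical, and the builder name follows the --bot-prefix convention from the partitioning script above:

# Walk backwards from build 1234 until a successful build is found.
data = DownloadDataFromBuilder('linux-newlib-0', 1234)
LogVerbose('build text: %s' % data['text'])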
Example #9
  def DoUninstall(self):
    with util.InstallLock(self.config):
      RemoveFile(self.GetInstallStamp())

      root = util.GetInstallRoot(self.config)
      for filename in self.Files():
        fullname = os.path.join(root, filename)
        if not os.path.lexists(fullname):
          Warn('File not found while uninstalling: %s' % fullname)
          continue
        LogVerbose('uninstall: %s' % filename)
        RemoveFile(fullname)

      RemoveFile(self.GetListFile())
Example #10
def RunGitCmd(directory, cmd, error_ok=False):
    cmd = ['git'] + cmd
    LogVerbose('%s' % ' '.join(cmd))
    p = subprocess.Popen(cmd,
                         cwd=directory,
                         stderr=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if not error_ok and p.returncode != 0:
        if stdout:
            Log(stdout)
        if stderr:
            Log(stderr)
        raise Error('git command failed: %s' % cmd)
    Trace('git exited with %d' % p.returncode)
    return p.returncode
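
A couple of hedged usage examples (the checkout directory src_dir is hypothetical), mirroring the git invocations from the Patch() snippet above:

# Stage and commit everything in a checkout.
RunGitCmd(src_dir, ['add', '.'])
RunGitCmd(src_dir, ['commit', '-m', 'Apply naclports patch'])
# With error_ok=True a non-zero exit code is returned instead of raising Error.
if RunGitCmd(src_dir, ['diff', '--quiet', 'HEAD'], error_ok=True) != 0:
    LogVerbose('checkout has local modifications')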
Example #11
    def Extract(self):
        """Extract the package archive into its build location.

    This method assumes the package has already been downloaded.
    """
        if self.IsGitUpstream():
            self.GitClone()
            return

        archive = self.DownloadLocation()
        if not archive:
            self.Log('Skipping extract; No upstream archive')
            return

        dest = self.GetBuildLocation()
        output_path, new_foldername = os.path.split(dest)
        util.Makedirs(output_path)

        # Check existing stamp file contents
        stamp_file = self.GetExtractStamp()
        stamp_contents = self.GetExtractStampContent()
        if os.path.exists(dest):
            if StampContentsMatch(stamp_file, stamp_contents):
                Log('Already up-to-date: %s' % util.RelPath(dest))
                return

            raise Error("Upstream archive or patch has changed.\n" +
                        "Please remove existing checkout and try again: '%s'" %
                        dest)

        util.LogHeading('Extracting')
        util.Makedirs(paths.OUT_DIR)
        tmp_output_path = tempfile.mkdtemp(dir=paths.OUT_DIR)
        try:
            ExtractArchive(archive, tmp_output_path)
            src = os.path.join(tmp_output_path, new_foldername)
            if not os.path.isdir(src):
                raise Error('Archive contents not found: %s' % src)
            LogVerbose("renaming '%s' -> '%s'" % (src, dest))
            os.rename(src, dest)
        finally:
            util.RemoveTree(tmp_output_path)

        self.RemoveStamps()
        WriteStamp(stamp_file, stamp_contents)
Example #12
def main(args):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='naclports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-p',
                        '--parallel',
                        action='store_true',
                        help='Download packages in parallel.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cache the output of gsutil -L (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        naclports.SetVerbose(True)

    sdk_version = naclports.util.GetSDKVersion()
    Log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/publish' % (naclports.GS_BUCKET,
                                                    sdk_version, args.revision)
    gs_base_url = 'gs://' + base_path
    cmd = FindGsutil() + ['ls', gs_base_url]
    LogVerbose('Running: %s' % str(cmd))
    try:
        all_published = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        raise naclports.Error("Command '%s' failed: %s" % (cmd, e))

    pkg_dir = re.findall(r'pkg_[\w-]+', all_published)
    for pkg in pkg_dir:
        listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib',
                                    pkg + '_' + 'listing.txt')
        if args.cache_listing and os.path.exists(listing_file):
            Log('Using pre-cached gs listing: %s' % listing_file)
            with open(listing_file) as f:
                listing = f.read()
        else:
            gs_url = gs_base_url + '/' + pkg + '/*'
            Log('Searching for packages at: %s' % gs_url)
            cmd = FindGsutil() + ['stat', gs_url]
            LogVerbose('Running: %s' % str(cmd))
            try:
                listing = subprocess.check_output(cmd)
            except subprocess.CalledProcessError as e:
                raise naclports.Error("Command '%s' failed: %s" % (cmd, e))
            if args.cache_listing:
                with open(listing_file, 'w') as f:
                    f.write(listing)
        all_files = ParseGsUtilOutput(listing)
        Log('Found %d packages [%s] for %s' %
            (len(all_files), FormatSize(sum(f.size for f in all_files)), pkg))
        DownloadFiles(pkg, all_files, not args.skip_md5, args.parallel)
    Log('Done')
    return 0