Example #1
def cmd_pkg_uscan(package, options):
    """Use Debian's 'uscan' to check for upstream versions."""
    if not package.URL:
        return 0

    if package.VERSION not in package.URL:
        print_error('%s: uscan only works if VERSION is embedded in URL' %
                    package.NAME)
        return 0

    temp_fd, temp_file = tempfile.mkstemp('webports_watchfile')
    try:
        with os.fdopen(temp_fd, 'w') as f:
            uscan_url = package.URL.replace(package.VERSION, '(.+)')
            uscan_url = uscan_url.replace('download.sf.net', 'sf.net')
            util.log_verbose('uscan pattern: %s' % uscan_url)
            f.write("version = 3\n")
            f.write("%s\n" % uscan_url)

        cmd = [
            'uscan', '--upstream-version', package.VERSION, '--package',
            package.NAME, '--watchfile', temp_file
        ]
        util.log_verbose(' '.join(cmd))
        rtn = subprocess.call(cmd)
    finally:
        os.remove(temp_file)

    return rtn
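
Note: the URL rewrite above is what turns the package URL into the regular
expression that uscan matches against upstream release listings. A minimal
illustration with a hypothetical URL and VERSION:

url = 'https://example.org/releases/foo-1.2.3.tar.gz'  # hypothetical
version = '1.2.3'
print(url.replace(version, '(.+)'))
# -> https://example.org/releases/foo-(.+).tar.gz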
Example #2
  def patch(self):
    stamp_file = os.path.join(self.get_stamp_dir(), 'nacl_patch')
    src_dir = self.get_build_location()
    if self.URL is None:
      return

    if os.path.exists(stamp_file):
      self.log('Skipping patch step (cleaning source tree)')
      cmd = ['git', 'clean', '-f', '-d']
      if util.log_level <= util.LOG_INFO:
        cmd.append('-q')
      self.run_cmd(cmd)
      return

    util.log_heading('Patching')
    init_git_repo(src_dir)
    if os.path.exists(self.get_patch_file()):
      log_verbose('applying patch to: %s' % src_dir)
      cmd = ['patch', '-p1', '-g0', '--no-backup-if-mismatch']
      with open(self.get_patch_file()) as f:
        self.run_cmd(cmd, stdin=f)
      self.run_cmd(['git', 'add', '.'])
      self.run_cmd(['git', 'commit', '-m', 'Apply webports patch'])

    write_stamp(stamp_file, '')
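
Note: init_git_repo is not part of this listing. A plausible sketch, consistent
with how the patch step uses it (the pristine tree must be committed so that
'git clean' and the follow-up commit work), might be:

import os
import subprocess

def init_git_repo(directory):
  """Hypothetical helper: turn a freshly extracted tree into a git repo."""
  if os.path.exists(os.path.join(directory, '.git')):
    return
  subprocess.check_call(['git', 'init'], cwd=directory)
  subprocess.check_call(['git', 'add', '-f', '.'], cwd=directory)
  subprocess.check_call(['git', 'commit', '-q', '-m', 'Initial import'],
                        cwd=directory)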
Example #3
def load_canned(parts):
  # Return an empty partition for the no-sharding case.
  if parts == 1:
    return [[]]
  partitions = []
  partition = []
  input_file = os.path.join(SCRIPT_DIR, 'partition%d.txt' % parts)
  log_verbose("LoadCanned: %s" % input_file)
  with open(input_file) as fh:
    for line in fh:
      if line.strip().startswith('#'):
        continue
      if line.startswith('  '):
        partition.append(line[2:].strip())
      else:
        if partition:
          partitions.append(partition)
          partition = []
  assert not partition
  assert len(partitions) == parts, partitions
  # Return a small set of packages for testing.
  if os.environ.get('TEST_BUILDBOT'):
    partitions[0] = [
        'corelibs',
        'glibc-compat',
        'nacl-spawn',
        'ncurses',
        'readline',
        'libtar',
        'zlib',
        'lua',
    ]
  return partitions
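
Note: the parser above implies a simple file format: lines starting with '#'
are comments, two-space indented lines name packages in the current partition,
and any other line closes the partition (which is why the file must end with a
non-indented line, or the trailing assert fails). A hypothetical
partition2.txt that load_canned(2) would accept:

sample_partition2_txt = """\
# comment lines are ignored
shard-0:
  zlib
  ncurses
shard-1:
  lua
end
"""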
Example #4
def main(args):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='webports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-p',
                        '--parallel',
                        action='store_true',
                        help='Download packages in parallel.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cache the gsutil listing output (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        webports.set_verbose(True)

    sdk_version = webports.util.get_sdk_version()
    log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/packages' % (
        webports.GS_BUCKET, sdk_version, args.revision)
    gs_url = 'gs://' + base_path + '/*'
    listing_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'listing.txt')

    if args.cache_listing and os.path.exists(listing_file):
        log('Using pre-cached gs listing: %s' % listing_file)
        with open(listing_file) as f:
            listing = f.read()
    else:
        log('Searching for packages at: %s' % gs_url)
        cmd = find_gsutil() + ['stat', gs_url]
        log_verbose('Running: %s' % str(cmd))
        try:
            listing = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            raise webports.Error("Command '%s' failed: %s" % (cmd, e))
        if args.cache_listing:
            with open(listing_file, 'w') as f:
                f.write(listing)

    all_files = parse_gs_util_output(listing)

    log('Found %d packages [%s]' %
        (len(all_files), format_size(sum(f.size for f in all_files))))

    binaries = download_files(all_files, not args.skip_md5, args.parallel)
    index_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
    log('Generating %s' % index_file)
    webports.package_index.write_index(index_file, binaries)
    log('Done')
    return 0
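
Note: format_size is referenced here but not defined in this listing. A
minimal sketch of such a helper (an assumption, not the project's actual
code):

def format_size(num_bytes):
  """Return a human-readable size string, e.g. '11.3 MB'."""
  size = float(num_bytes)
  for unit in ('B', 'KB', 'MB', 'GB'):
    if size < 1024.0:
      return '%.1f %s' % (size, unit)
    size /= 1024.0
  return '%.1f TB' % size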
Example #5
def get_canned(index, parts):
  assert 0 <= index < parts, [index, parts]
  partitions = load_canned(parts)
  partitions = fixup_canned(partitions)
  log_verbose("Found %d packages for shard %d" % (len(partitions[index]),
                                                  index))
  return partitions[index]
Example #6
def download_files(files, check_hashes=True, parallel=False):
    """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
    files_to_download = []
    filenames = []
    download_dir = webports.package_index.PREBUILT_ROOT
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for file_info in files:
        basename = os.path.basename(file_info.url)
        file_info.name = os.path.join(download_dir, basename)
        filenames.append((file_info.name, file_info.url))
        if os.path.exists(file_info.name):
            if not check_hashes or check_hash(file_info.name, file_info.md5):
                log('Up-to-date: %s' % file_info.name)
                continue
        files_to_download.append(file_info)

    def check(file_info):
        if check_hashes and not check_hash(file_info.name, file_info.md5):
            raise webports.Error(
                'Checksum failed: %s\nExpected=%s\nActual=%s' %
                (file_info.name, file_info.md5, get_hash(file_info.name)))

    if not files_to_download:
        log('All files up-to-date')
    else:
        total_size = sum(f.size for f in files_to_download)
        log('Need to download %d/%d files [%s]' %
            (len(files_to_download), len(files), format_size(total_size)))

        gsutil = find_gsutil()
        if parallel:
            remaining_files = files_to_download
            num_files = 20
            while remaining_files:
                files = remaining_files[:num_files]
                remaining_files = remaining_files[num_files:]
                cmd = (gsutil + ['-m', 'cp'] +
                       [f.gsurl for f in files] + [download_dir])
                log_verbose(cmd)
                subprocess.check_call(cmd)
                for file_info in files:
                    check(file_info)
        else:
            for file_info in files_to_download:
                webports.download_file(file_info.name, file_info.url)
                check(file_info)

    return filenames
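
Note: check_hash and get_hash are likewise not shown. Given that FileInfo
carries an md5 field, a straightforward sketch (assumed, not the project's
actual code) would be:

import hashlib

def get_hash(filename):
  """Return the hex MD5 digest of a file, read in chunks."""
  md5 = hashlib.md5()
  with open(filename, 'rb') as f:
    for chunk in iter(lambda: f.read(65536), b''):
      md5.update(chunk)
  return md5.hexdigest()

def check_hash(filename, expected_md5):
  """Return True if the file's MD5 digest matches the expected value."""
  return get_hash(filename) == expected_md5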
Example #7
  def write_stamp(self):
    """Write stamp file containing pkg_info."""
    filename = util.get_install_stamp(self.NAME, self.config)
    make_dir_if_needed(filename)
    util.log_verbose('stamp: %s' % filename)
    pkg_info = self.get_pkg_info()
    with open(filename, 'w') as f:
      f.write(pkg_info)
Example #8
def extract_archive(archive, destination):
  ext = os.path.splitext(archive)[1]
  if ext in ('.gz', '.tgz', '.bz2', '.xz'):
    cmd = ['tar', 'xf', archive, '-C', destination]
  elif ext in ('.zip',):
    cmd = ['unzip', '-q', '-d', destination, archive]
  else:
    raise Error('unhandled extension: %s' % ext)
  log_verbose(cmd)
  subprocess.check_call(cmd)
Example #9
def main(args):
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('revision', metavar='REVISION',
                      help='webports revision to scan for.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-p', '--parallel', action='store_true',
                      help='Download packages in parallel.')
  parser.add_argument('-l', '--cache-listing', action='store_true',
                      help='Cache the gsutil listing output (for testing).')
  parser.add_argument('--skip-md5', action='store_true',
                      help='Assume on-disk files are up-to-date (for testing).')
  args = parser.parse_args(args)
  if args.verbose:
    webports.set_verbose(True)

  sdk_version = webports.util.get_sdk_version()
  log('Scanning packages built for pepper_%s at revision %s' %
      (sdk_version, args.revision))
  base_path = '%s/builds/pepper_%s/%s/publish' % (webports.GS_BUCKET,
                                                  sdk_version, args.revision)
  gs_base_url = 'gs://' + base_path
  cmd = find_gsutil() + ['ls', gs_base_url]
  log_verbose('Running: %s' % str(cmd))
  try:
    all_published = subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    raise webports.Error("Command '%s' failed: %s" % (cmd, e))

  pkg_dir = re.findall(r'pkg_[\w-]+', all_published)
  for pkg in pkg_dir:
    listing_file = os.path.join(webports.NACLPORTS_ROOT, 'lib',
                                pkg + '_' + 'listing.txt')
    if args.cache_listing and os.path.exists(listing_file):
      log('Using pre-cached gs listing: %s' % listing_file)
      with open(listing_file) as f:
        listing = f.read()
    else:
      gs_url = gs_base_url + '/' + pkg + '/*'
      log('Searching for packages at: %s' % gs_url)
      cmd = find_gsutil() + ['stat', gs_url]
      log_verbose('Running: %s' % str(cmd))
      try:
        listing = subprocess.check_output(cmd)
      except subprocess.CalledProcessError as e:
        raise webports.Error("Command '%s' failed: %s" % (cmd, e))
      if args.cache_listing:
        with open(listing_file, 'w') as f:
          f.write(listing)
    all_files = parse_gs_util_output(listing)
    log('Found %d packages [%s] for %s' %
        (len(all_files), format_size(sum(f.size for f in all_files)), pkg))
    download_files(pkg, all_files, not args.skip_md5, args.parallel)
  log('Done')
  return 0
Example #10
def main(args):
  parser = argparse.ArgumentParser()
  parser.add_argument('--check', action='store_true',
                      help='check canned partition information is up-to-date.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-t', '--print-canned', type=int,
                      help='Print the canned partition list and exit.')
  parser.add_argument('-b', '--bot-prefix', help='builder name prefix.',
                      default='linux-clang-')
  parser.add_argument('-n', '--num-bots',
                      help='Number of builders on the waterfall to collect '
                      'data from or to print a canned partition for.',
                      type=int, default=5)
  parser.add_argument('-p', '--num-parts',
                      help='Number of parts to partition things into '
                      '(this will differ from --num-bots when changing the '
                      'number of shards).',
                      type=int, default=5)
  parser.add_argument('--build-number', help='Builder number to look at for '
                      'historical data on build times.', type=int, default=-1)
  options = parser.parse_args(args)
  webports.set_verbose(options.verbose)

  if options.check:
    for num_bots in xrange(1, 7):
      print('Checking partitioning with %d bot(s)' % num_bots)
      # get_canned will raise an Error if the canned partition information is
      # bad, which in turn will trigger a non-zero return from this script.
      get_canned(0, num_bots)
    return

  if options.print_canned is not None:
    print_canned(options.print_canned, options.num_bots)
    return

  projects = Projects()
  for bot in range(options.num_bots):
    bot_name = '%s%d' % (options.bot_prefix, bot)
    log_verbose('Attempting to add data from "%s"' % bot_name)
    projects.add_data_from_builder(bot_name, options.build_number)
  projects.post_process_deps()

  parts = get_partition(projects, options.num_parts)
  for i, project_times in enumerate(parts):
    print('builder %d (total: %d)' % (i, project_times.total_time))
    project_names = project_times.topologically_sorted_project_names(projects)
    print('  %s' % '\n  '.join(project_names))

  times = list(sorted(part.total_time for part in parts))
  difference = 0
  for i in range(1, len(times)):
    difference += times[i] - times[i - 1]
  print('Difference between total time of builders: %d' % difference)
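
Note: because times is sorted, the consecutive-difference loop telescopes, so
the reported metric is simply the spread between the slowest and the fastest
builder:

difference = times[-1] - times[0]  # equivalent to the loop above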
Example #11
  def installable(self, package_name, config):
    """Returns True if the index contains the given package and it is
    installable in the currently configured SDK."""
    info = self.packages.get((package_name, config))
    if not info:
      return False
    version = util.get_sdk_version()
    if info['BUILD_SDK_VERSION'] != version:
      util.log_verbose('Prebuilt package was built with different SDK version: '
                       '%s vs %s' % (info['BUILD_SDK_VERSION'], version))
      return False
    return True
Example #12
    def _install_files(self, force):
        dest = util.get_install_root(self.config)
        dest_tmp = os.path.join(dest, 'install_tmp')
        if os.path.exists(dest_tmp):
            shutil.rmtree(dest_tmp)

        if self.is_any_version_installed():
            raise error.Error('package already installed: %s' %
                              self.info_string())

        self.log_status('Installing')
        util.log_verbose('installing from: %s' % self.filename)
        util.makedirs(dest_tmp)

        names = []
        try:
            with tarfile.open(self.filename) as tar:
                for info in tar:
                    if info.isdir():
                        continue
                    name = posixpath.normpath(info.name)
                    if name == 'pkg_info':
                        continue
                    if not name.startswith(PAYLOAD_DIR + '/'):
                        raise error.PkgFormatError(
                            'invalid file in package: %s' % name)

                    name = name[len(PAYLOAD_DIR) + 1:]
                    names.append(name)

                if not force:
                    for name in names:
                        full_name = os.path.join(dest, name)
                        if os.path.exists(full_name):
                            raise error.Error('file already exists: %s' %
                                              full_name)

                tar.extractall(dest_tmp)
                payload_tree = os.path.join(dest_tmp, PAYLOAD_DIR)

                names = filter_out_executables(names, payload_tree)

                for name in names:
                    install_file(name, payload_tree, dest)
        finally:
            shutil.rmtree(dest_tmp)

        for name in names:
            relocate_file(name, dest)

        self.write_file_list(names)
Example #13
def run_git_cmd(directory, cmd, error_ok=False):
  cmd = ['git'] + cmd
  log_verbose(' '.join(cmd))
  p = subprocess.Popen(cmd, cwd=directory, stderr=subprocess.PIPE,
                       stdout=subprocess.PIPE)
  stdout, stderr = p.communicate()
  if not error_ok and p.returncode != 0:
    if stdout:
      log(stdout)
    if stderr:
      log(stderr)
    raise Error('git command failed: %s' % cmd)
  trace('git exited with %d' % p.returncode)
  return p.returncode
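
A hypothetical call site, mirroring how the patch step drives git:

run_git_cmd('/path/to/src', ['add', '.'])
run_git_cmd('/path/to/src', ['commit', '-m', 'Apply webports patch'],
            error_ok=True)  # tolerate 'nothing to commit'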
Example #14
    def extract(self):
        """Extract the package archive into its build location.

        This method assumes the package has already been downloaded.
        """
        if self.is_git_upstream():
            self.git_clone()
            return

        archive = self.download_location()
        if not archive:
            self.log('Skipping extract; No upstream archive')
            return

        dest = self.get_build_location()
        output_path, new_foldername = os.path.split(dest)
        util.makedirs(output_path)

        # Check existing stamp file contents
        stamp_file = self.get_extract_stamp()
        stamp_contents = self.get_extract_stamp_content()
        if os.path.exists(dest):
            if stamp_contents_match(stamp_file, stamp_contents):
                log('Already up-to-date: %s' % util.rel_path(dest))
                return

            raise Error("Upstream archive or patch has changed.\n" +
                        "Please remove existing checkout and try again: '%s'" %
                        dest)

        util.log_heading('Extracting')
        util.makedirs(paths.OUT_DIR)
        tmp_output_path = tempfile.mkdtemp(dir=paths.OUT_DIR)
        try:
            extract_archive(archive, tmp_output_path)
            src = os.path.join(tmp_output_path, new_foldername)
            if not os.path.isdir(src):
                raise Error('Archive contents not found: %s' % src)
            log_verbose("renaming '%s' -> '%s'" % (src, dest))
            os.rename(src, dest)
        finally:
            util.remove_tree(tmp_output_path)

        self.remove_stamps()
        write_stamp(stamp_file, stamp_contents)
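
Note: the module-level write_stamp and stamp_contents_match helpers used above
(and in the patch step of Example #2) are not shown. A minimal sketch
consistent with their call sites:

import os

def write_stamp(stamp_file, contents):
  """Record that a step completed, keyed on the given contents."""
  with open(stamp_file, 'w') as f:
    f.write(contents)

def stamp_contents_match(stamp_file, contents):
  """Return True if the stamp exists and holds exactly the given contents."""
  if not os.path.exists(stamp_file):
    return False
  with open(stamp_file) as f:
    return f.read() == contents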
Example #15
  def do_uninstall(self, force):
    with util.InstallLock(self.config):
      if not force:
        for pkg in installed_package_iterator(self.config):
          if self.NAME in pkg.DEPENDS:
            raise error.Error("Unable to uninstall '%s' (depended on by '%s')" %
                              (self.NAME, pkg.NAME))
      remove_file(self.get_install_stamp())

      root = util.get_install_root(self.config)
      for filename in self.files():
        fullname = os.path.join(root, filename)
        if not os.path.lexists(fullname):
          util.warn('File not found while uninstalling: %s' % fullname)
          continue
        util.log_verbose('uninstall: %s' % filename)
        remove_file(fullname)

      if os.path.exists(self.get_list_file()):
        remove_file(self.get_list_file())
Example #16
def install_file(filename, old_root, new_root):
  """Install a single file by moving it into a new location.

  Args:
    filename: Relative name of file to install.
    old_root: The current location of the file.
    new_root: The new desired root for the file.
  """
  oldname = os.path.join(old_root, filename)

  util.log_verbose('install: %s' % filename)

  newname = os.path.join(new_root, filename)
  dirname = os.path.dirname(newname)
  if not os.path.isdir(dirname):
    util.makedirs(dirname)
  os.rename(oldname, newname)

  # When installing ELF binaries into the toolchain directories, remove the
  # X bit so that they are not found when searching the PATH.
  if util.is_elf_file(newname) or util.is_pexe_file(newname):
    mode = os.stat(newname).st_mode
    mode = mode & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    os.chmod(newname, mode)
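
Note: util.is_elf_file and util.is_pexe_file are not included here. A
plausible sketch based on the standard magic numbers (ELF's '\x7fELF', and
LLVM bitcode's 'BC\xc0\xde' for pexes):

def is_elf_file(filename):
  """Return True if the file starts with the ELF magic bytes."""
  with open(filename, 'rb') as f:
    return f.read(4) == b'\x7fELF'

def is_pexe_file(filename):
  """Return True if the file starts with the LLVM bitcode magic (pexe)."""
  with open(filename, 'rb') as f:
    return f.read(4) == b'BC\xc0\xde'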
Example #17
def download_data_from_builder(builder, build):
  max_tries = 30

  for _ in xrange(max_tries):
    url = 'http://build.chromium.org/p/client.nacl.ports/json'
    url += '/builders/%s/builds/%d' % (builder, build)
    log_verbose('Downloading %s' % url)
    f = urllib2.urlopen(url)
    try:
      data = json.loads(f.read())
      text = data['text']
      if text == ['build', 'successful']:
        log_verbose('  Success!')
        return data
      log_verbose('  Not successful, trying previous build.')
    finally:
      f.close()
    build -= 1

  # By this point 'build' has been decremented max_tries times.
  raise Error('Unable to find a successful build:\nBuilder: %s\nRange: [%d, %d]'
              % (builder, build + 1, build + max_tries))
Example #18
def extract_archive(archive, destination):
    log_verbose('extracting archive: {} to {}'.format(archive, destination))
    shutil.unpack_archive(archive, destination)
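
Note: unlike the tar/unzip variant in Example #8, this version relies on
shutil.unpack_archive to pick the right unpacker from the file extension, so
it needs no external tools. Hypothetical usage:

extract_archive('downloads/foo-1.2.3.tar.gz', 'out/build')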