Example #1
  def create_pkg_file(self):
    """Create and pkg file for use with the FreeBSD pkg tool.

    Create a package from the result of the package's InstallStep.
    """
    install_dir = self.get_install_location()
    if not os.path.exists(install_dir):
      log('Skipping pkg creation. Install dir not found: %s' % install_dir)
      return

    # Strip all elf or pexe files in the install directory (except .o files
    # since we don't want to strip, for example, crt1.o)
    if not self.config.debug and self.config.toolchain != 'emscripten':
      strip = util.get_strip(self.config)
      for root, _, files in os.walk(install_dir):
        for filename in files:
          fullname = os.path.join(root, filename)
          if (os.path.isfile(fullname) and util.is_elf_file(fullname) and
              os.path.splitext(fullname)[1] != '.o'):
            log('stripping: %s %s' % (strip, fullname))
            subprocess.check_call([strip, fullname])

    abi = 'pkg_' + self.config.toolchain
    if self.config.arch != self.config.toolchain:
      abi += "_" + util.arch_to_pkgarch[self.config.arch]
    abi_dir = os.path.join(paths.PUBLISH_ROOT, abi)
    pkg_file = os.path.join(abi_dir, '%s-%s.tbz' % (self.NAME, self.VERSION))
    util.makedirs(abi_dir)
    deps = self.DEPENDS
    if self.config.toolchain != 'glibc':
      deps = []
    bsd_pkg.create_pkg_file(self.NAME, self.VERSION, self.config.arch,
                            self.get_install_location(), pkg_file, deps)
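
The util.arch_to_pkgarch table used above maps NaCl architecture names to
pkg-style ABI strings. A hypothetical sketch of what util might define; the
exact keys and values are assumptions, not taken from this listing:

# Assumed mapping from NaCl arch names to pkg ABI names; the real table
# lives in util and may differ.
arch_to_pkgarch = {
    'x86_64': 'x86-64',
    'i686': 'i686',
    'arm': 'armv7',
    'pnacl': 'pnacl',
}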
Example #2
    def create_pkg_file(self):
        """Create and pkg file for use with the FreeBSD pkg tool.

    Create a package from the result of the package's InstallStep.
    """
        install_dir = self.get_install_location()
        if not os.path.exists(install_dir):
            log('Skipping pkg creation. Install dir not found: %s' %
                install_dir)
            return

        # Strip all elf or pexe files in the install directory (except .o files
        # since we don't want to strip, for example, crt1.o)
        if not self.config.debug and self.config.toolchain != 'emscripten':
            strip = util.get_strip(self.config)
            for root, _, files in os.walk(install_dir):
                for filename in files:
                    fullname = os.path.join(root, filename)
                    if (os.path.isfile(fullname) and util.is_elf_file(fullname)
                            and os.path.splitext(fullname)[1] != '.o'):
                        log('stripping: %s %s' % (strip, fullname))
                        subprocess.check_call([strip, fullname])

        abi = 'pkg_' + self.config.toolchain
        if self.config.arch != self.config.toolchain:
            abi += "_" + util.arch_to_pkgarch[self.config.arch]
        abi_dir = os.path.join(paths.PUBLISH_ROOT, abi)
        pkg_file = os.path.join(abi_dir,
                                '%s-%s.tbz' % (self.NAME, self.VERSION))
        util.makedirs(abi_dir)
        deps = self.DEPENDS
        if self.config.toolchain != 'glibc':
            deps = []
        bsd_pkg.create_pkg_file(self.NAME, self.VERSION, self.config.arch,
                                self.get_install_location(), pkg_file, deps)
Example #3
def do_cmd(package):
  try:
    pkg_commands[args.command](package, args)
  except error.DisabledError as e:
    if args.ignore_disabled:
      util.log('webports: %s' % e)
    else:
      # Re-raise without discarding the original traceback.
      raise
Example #4
def write_stamp(stamp_file, stamp_contents):
  """Write a stamp file to disk with the given file contents."""
  stamp_dir = os.path.dirname(stamp_file)
  util.makedirs(stamp_dir)

  with open(stamp_file, 'w') as f:
    f.write(stamp_contents)
  log('Wrote stamp: %s' % stamp_file)
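
write_stamp has a natural counterpart used by extract() in Examples #17/#18:
stamp_contents_match. A minimal sketch of what it presumably does, inferred
from its call sites; the real helper may differ:

def stamp_contents_match(stamp_file, stamp_contents):
  """Return True if stamp_file exists and holds exactly stamp_contents."""
  if not os.path.exists(stamp_file):
    return False
  with open(stamp_file) as f:
    return f.read() == stamp_contents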
Example #5
def do_cmd(package):
    try:
        pkg_commands[args.command](package, args)
    except error.DisabledError as e:
        if args.ignore_disabled:
            util.log('webports: %s' % e)
        else:
            # Re-raise without discarding the original traceback.
            raise
Example #6
def write_stamp(stamp_file, stamp_contents):
    """Write a stamp file to disk with the given file contents."""
    stamp_dir = os.path.dirname(stamp_file)
    util.makedirs(stamp_dir)

    with open(stamp_file, 'w') as f:
        f.write(stamp_contents)
    log('Wrote stamp: %s' % stamp_file)
Example #7
  def clean(self):
    pkg = self.package_file()
    log('removing %s' % pkg)
    if os.path.exists(pkg):
      os.remove(pkg)

    stamp_dir = os.path.join(paths.STAMP_DIR, self.NAME)
    log('removing %s' % stamp_dir)
    util.remove_tree(stamp_dir)
Example #8
    def clean(self):
        pkg = self.package_file()
        log('removing %s' % pkg)
        if os.path.exists(pkg):
            os.remove(pkg)

        stamp_dir = os.path.join(paths.STAMP_DIR, self.NAME)
        log('removing %s' % stamp_dir)
        util.remove_tree(stamp_dir)
Example #9
def main(args):
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('revision', metavar='REVISION',
                      help='webports revision to scan for.')
  parser.add_argument('-v', '--verbose', action='store_true',
                      help='Output extra information.')
  parser.add_argument('-p', '--parallel', action='store_true',
                      help='Download packages in parallel.')
  parser.add_argument('-l', '--cache-listing', action='store_true',
                      help='Cached output of gsutil -L (for testing).')
  parser.add_argument('--skip-md5', action='store_true',
                      help='Assume on-disk files are up-to-date (for testing).')
  args = parser.parse_args(args)
  if args.verbose:
    webports.set_verbose(True)

  sdk_version = webports.util.get_sdk_version()
  log('Scanning packages built for pepper_%s at revision %s' %
      (sdk_version, args.revision))
  base_path = '%s/builds/pepper_%s/%s/publish' % (webports.GS_BUCKET,
                                                  sdk_version, args.revision)
  gs_base_url = 'gs://' + base_path
  cmd = find_gsutil() + ['ls', gs_base_url]
  log_verbose('Running: %s' % str(cmd))
  try:
    all_published = subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    raise webports.Error("Command '%s' failed: %s" % (cmd, e))

  pkg_dir = re.findall(r'pkg_[\w-]+', all_published)
  for pkg in pkg_dir:
    listing_file = os.path.join(webports.NACLPORTS_ROOT, 'lib',
                                pkg + '_listing.txt')
    if args.cache_listing and os.path.exists(listing_file):
      log('Using pre-cached gs listing: %s' % listing_file)
      with open(listing_file) as f:
        listing = f.read()
    else:
      gs_url = gs_base_url + '/' + pkg + '/*'
      log('Searching for packages at: %s' % gs_url)
      cmd = find_gsutil() + ['stat', gs_url]
      log_verbose('Running: %s' % str(cmd))
      try:
        listing = subprocess.check_output(cmd)
      except subprocess.CalledProcessError as e:
        raise webports.Error("Command '%s' failed: %s" % (cmd, e))
      if args.cache_listing:
        with open(listing_file, 'w') as f:
          f.write(listing)
    all_files = parse_gs_util_output(listing)
    log('Found %d packages [%s] for %s' %
        (len(all_files), format_size(sum(f.size for f in all_files)), pkg))
    download_files(pkg, all_files, not args.skip_md5, args.parallel)
  log('Done')
  return 0
Example #10
def cmd_pkg_check(package, options):
  """Verify dependency information for given package(s)"""
  # The fact that we got this far means the pkg_info is basically valid.
  # This final check verifies the dependencies are valid.
  # Cache the list of all package names since this function could be called
  # a lot in the case of "webports check --all".
  packages = source_package.source_package_iterator()
  if cmd_pkg_check.all_package_names is None:
    cmd_pkg_check.all_package_names = [os.path.basename(p.root)
                                       for p in packages]
  util.log("Checking deps for %s .." % package.NAME)
  package.check_deps(cmd_pkg_check.all_package_names)
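
For the function-attribute cache above to work, all_package_names has to be
seeded once after the definition; presumably the module does something like:

cmd_pkg_check.all_package_names = None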
Example #11
    def check_deps(self, valid_packages):
        for package in self.DEPENDS:
            if package not in valid_packages:
                util.log('%s: Invalid dependency: %s' % (self.info, package))
                return False

        for package in self.CONFLICTS:
            if package not in valid_packages:
                util.log('%s: Invalid conflict: %s' % (self.info, package))
                return False

        return True
Example #12
  def check_deps(self, valid_packages):
    for package in self.DEPENDS:
      if package not in valid_packages:
        util.log('%s: Invalid dependency: %s' % (self.info, package))
        return False

    for package in self.CONFLICTS:
      if package not in valid_packages:
        util.log('%s: Invalid conflict: %s' % (self.info, package))
        return False

    return True
Example #13
def run_git_cmd(directory, cmd, error_ok=False):
  cmd = ['git'] + cmd
  log_verbose('%s' % ' '.join(cmd))
  p = subprocess.Popen(cmd, cwd=directory, stderr=subprocess.PIPE,
                       stdout=subprocess.PIPE)
  stdout, stderr = p.communicate()
  if not error_ok and p.returncode != 0:
    if stdout:
      log(stdout)
    if stderr:
      log(stderr)
    raise Error('git command failed: %s' % cmd)
  trace('git exited with %d' % p.returncode)
  return p.returncode
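
A short usage sketch (the checkout path is hypothetical): with the default
error_ok=False a failing command raises Error, while error_ok=True hands the
non-zero exit code back to the caller.

# Hypothetical call site: tolerate a failed fetch and continue.
returncode = run_git_cmd('/path/to/checkout', ['fetch', 'origin'],
                         error_ok=True)
if returncode != 0:
  log('fetch failed; continuing without update')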
Example #14
  def download(self, package_name, config):
    if not os.path.exists(PREBUILT_ROOT):
      util.makedirs(PREBUILT_ROOT)
    info = self.packages[(package_name, config)]
    filename = os.path.join(PREBUILT_ROOT, os.path.basename(info['BIN_URL']))
    if os.path.exists(filename):
      try:
        util.verify_hash(filename, info['BIN_SHA1'])
        return filename
      except util.HashVerificationError:
        pass
    util.log('Downloading prebuilt binary ...')
    util.download_file(filename, info['BIN_URL'])
    util.verify_hash(filename, info['BIN_SHA1'])
    return filename
Example #15
def create_pkg_file(name, version, arch, payload_dir, outfile, depends):
    """Create an archive file in FreeBSD's pkg file format"""
    util.log('Creating pkg package: %s' % outfile)
    manifest = collections.OrderedDict()
    manifest['name'] = name
    manifest['version'] = version
    manifest['arch'] = 'nacl:0:%s' % arch

    # The following fields are required by 'pkg' but we don't have
    # meaningful values for them yet.
    manifest['origin'] = name
    manifest['comment'] = 'comment not available'
    manifest['desc'] = 'desc not available'
    manifest['maintainer'] = '*****@*****.**'
    manifest['www'] = 'https://chromium.googlesource.com/webports'
    manifest['prefix'] = INSTALL_PREFIX

    if depends:
        depends_dict = collections.OrderedDict()
        create_dependencies(depends_dict, depends)
        manifest['deps'] = depends_dict

    temp_dir = os.path.splitext(outfile)[0] + '.tmp'
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir)
    os.mkdir(temp_dir)

    content_dir = os.path.join(temp_dir, INSTALL_PREFIX)
    shutil.copytree(payload_dir, content_dir, symlinks=True)
    write_ucl(os.path.join(temp_dir, '+COMPACT_MANIFEST'), manifest)
    file_dict = collections.OrderedDict()
    parse_dir(temp_dir, file_dict, '/')
    manifest['files'] = file_dict
    write_ucl(os.path.join(temp_dir, '+MANIFEST'), manifest)

    with tarfile.open(outfile, 'w:bz2') as tar:
        for filename in os.listdir(temp_dir):
            if filename.startswith('+'):
                fullname = os.path.join(temp_dir, filename)
                tar.add(fullname, arcname=filename)

        for filename in os.listdir(temp_dir):
            if not filename.startswith('+'):
                fullname = os.path.join(temp_dir, filename)
                add_files_in_dir(fullname, tar, temp_dir)
    shutil.rmtree(temp_dir)
Example #16
def create_pkg_file(name, version, arch, payload_dir, outfile, depends):
  """Create an archive file in FreeBSD's pkg file format"""
  util.log('Creating pkg package: %s' % outfile)
  manifest = collections.OrderedDict()
  manifest['name'] = name
  manifest['version'] = version
  manifest['arch'] = 'nacl:0:%s' % arch

  # The following fields are required by 'pkg' but we don't have
  # meaningful values for them yet.
  manifest['origin'] = name
  manifest['comment'] = 'comment not available'
  manifest['desc'] = 'desc not available'
  manifest['maintainer'] = '*****@*****.**'
  manifest['www'] = 'https://chromium.googlesource.com/webports'
  manifest['prefix'] = INSTALL_PREFIX

  if depends:
    depends_dict = collections.OrderedDict()
    create_dependencies(depends_dict, depends)
    manifest['deps'] = depends_dict

  temp_dir = os.path.splitext(outfile)[0] + '.tmp'
  if os.path.exists(temp_dir):
    shutil.rmtree(temp_dir)
  os.mkdir(temp_dir)

  content_dir = os.path.join(temp_dir, INSTALL_PREFIX)
  shutil.copytree(payload_dir, content_dir, symlinks=True)
  write_ucl(os.path.join(temp_dir, '+COMPACT_MANIFEST'), manifest)
  file_dict = collections.OrderedDict()
  parse_dir(temp_dir, file_dict, '/')
  manifest['files'] = file_dict
  write_ucl(os.path.join(temp_dir, '+MANIFEST'), manifest)

  with tarfile.open(outfile, 'w:bz2') as tar:
    for filename in os.listdir(temp_dir):
      if filename.startswith('+'):
        fullname = os.path.join(temp_dir, filename)
        tar.add(fullname, arcname=filename)

    for filename in os.listdir(temp_dir):
      if not filename.startswith('+'):
        fullname = os.path.join(temp_dir, filename)
        add_files_in_dir(fullname, tar, temp_dir)
  shutil.rmtree(temp_dir)
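
create_dependencies is not shown in this listing. A hypothetical sketch
consistent with the call site above, in which each dependency becomes a
nested object under 'deps' (FreeBSD pkg manifests carry an origin and a
version per dependency; the placeholder values here are assumptions):

def create_dependencies(depends_dict, depends):
  for dep in depends:
    # 'pkg' wants origin and version per dependency; this codebase does
    # not track those, so placeholders are assumed here.
    depends_dict[dep] = collections.OrderedDict(
        [('origin', dep), ('version', '0')])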
Example #17
    def extract(self):
        """Extract the package archive into its build location.

        This method assumes the package has already been downloaded.
        """
        if self.is_git_upstream():
            self.git_clone()
            return

        archive = self.download_location()
        if not archive:
            self.log('Skipping extract; No upstream archive')
            return

        dest = self.get_build_location()
        output_path, new_foldername = os.path.split(dest)
        util.makedirs(output_path)

        # Check existing stamp file contents
        stamp_file = self.get_extract_stamp()
        stamp_contents = self.get_extract_stamp_content()
        if os.path.exists(dest):
            if stamp_contents_match(stamp_file, stamp_contents):
                log('Already up-to-date: %s' % util.rel_path(dest))
                return

            raise Error("Upstream archive or patch has changed.\n" +
                        "Please remove existing checkout and try again: '%s'" %
                        dest)

        util.log_heading('Extracting')
        util.makedirs(paths.OUT_DIR)
        tmp_output_path = tempfile.mkdtemp(dir=paths.OUT_DIR)
        try:
            extract_archive(archive, tmp_output_path)
            src = os.path.join(tmp_output_path, new_foldername)
            if not os.path.isdir(src):
                raise Error('Archive contents not found: %s' % src)
            log_verbose("renaming '%s' -> '%s'" % (src, dest))
            os.rename(src, dest)
        finally:
            util.remove_tree(tmp_output_path)

        self.remove_stamps()
        write_stamp(stamp_file, stamp_contents)
Example #18
  def extract(self):
    """Extract the package archive into its build location.

    This method assumes the package has already been downloaded.
    """
    if self.is_git_upstream():
      self.git_clone()
      return

    archive = self.download_location()
    if not archive:
      self.log('Skipping extract; No upstream archive')
      return

    dest = self.get_build_location()
    output_path, new_foldername = os.path.split(dest)
    util.makedirs(output_path)

    # Check existing stamp file contents
    stamp_file = self.get_extract_stamp()
    stamp_contents = self.get_extract_stamp_content()
    if os.path.exists(dest):
      if stamp_contents_match(stamp_file, stamp_contents):
        log('Already up-to-date: %s' % util.rel_path(dest))
        return

      raise Error("Upstream archive or patch has changed.\n" +
                  "Please remove existing checkout and try again: '%s'" % dest)

    util.log_heading('Extracting')
    util.makedirs(paths.OUT_DIR)
    tmp_output_path = tempfile.mkdtemp(dir=paths.OUT_DIR)
    try:
      extract_archive(archive, tmp_output_path)
      src = os.path.join(tmp_output_path, new_foldername)
      if not os.path.isdir(src):
        raise Error('Archive contents not found: %s' % src)
      log_verbose("renaming '%s' -> '%s'" % (src, dest))
      os.rename(src, dest)
    finally:
      util.remove_tree(tmp_output_path)

    self.remove_stamps()
    write_stamp(stamp_file, stamp_contents)
Example #19
    def download(self, force_mirror=None):
        """Download upstream sources and verify integrity."""
        if self.is_git_upstream():
            self.git_clone_to_mirror()
            return

        archive = self.download_location()
        if not archive:
            return

        if force_mirror is None:
            force_mirror = os.environ.get('FORCE_MIRROR', False)
        self.download_archive(force_mirror=force_mirror)

        if self.SHA1 is None:
            raise PkgFormatError('missing SHA1 attribute: %s' % self.info)

        util.verify_hash(archive, self.SHA1)
        log('verified: %s' % util.rel_path(archive))
Example #20
  def download(self, force_mirror=None):
    """Download upstream sources and verify integrity."""
    if self.is_git_upstream():
      self.git_clone_to_mirror()
      return

    archive = self.download_location()
    if not archive:
      return

    if force_mirror is None:
      force_mirror = os.environ.get('FORCE_MIRROR', False)
    self.download_archive(force_mirror=force_mirror)

    if self.SHA1 is None:
      raise PkgFormatError('missing SHA1 attribute: %s' % self.info)

    util.verify_hash(archive, self.SHA1)
    log('verified: %s' % util.rel_path(archive))
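
util.verify_hash and util.HashVerificationError are assumed throughout this
listing. A minimal sketch of the expected behavior, not the actual util
implementation (which may stream large files):

import hashlib

class HashVerificationError(Exception):
  """Stand-in for the exception assumed to live in util."""

def verify_hash(filename, sha1):
  """Raise HashVerificationError unless filename's SHA1 digest matches."""
  with open(filename, 'rb') as f:
    digest = hashlib.sha1(f.read()).hexdigest()
  if digest != sha1:
    raise HashVerificationError('hash check failed for %s: expected %s, '
                                'got %s' % (filename, sha1, digest))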
Example #21
def init_git_repo(directory):
    """Initialize the source git repository for a given package directory.

  This function works for unpacked tar files as well as cloned git
  repositories.  It sets up an 'upstream' branch pointing and the
  pristine upstream sources and a 'master' branch will contain changes
  specific to webports (normally the result of applying nacl.patch).

  Args:
    directory: Directory containing unpacked package sources.
  """
    git_dir = os.path.join(directory, '.git')

    # If the upstream ref exists then we've already initialized this repo
    if os.path.exists(os.path.join(git_dir, 'refs', 'heads', 'upstream')):
        return

    if os.path.exists(git_dir):
        log('Init existing git repo: %s' % directory)
        run_git_cmd(directory, ['checkout', '-b', 'placeholder'])
        run_git_cmd(directory, ['branch', '-D', 'upstream'], error_ok=True)
        run_git_cmd(directory, ['branch', '-D', 'master'], error_ok=True)
        run_git_cmd(directory, ['checkout', '-b', 'upstream'])
        run_git_cmd(directory, ['checkout', '-b', 'master'])
        run_git_cmd(directory, ['branch', '-D', 'placeholder'])
    else:
        log('Init new git repo: %s' % directory)
        run_git_cmd(directory, ['init'])
        try:
            # Setup a bogus identity on the buildbots.
            if os.environ.get('BUILDBOT_BUILDERNAME'):
                run_git_cmd(directory, ['config', 'user.name', 'Naclports'])
                run_git_cmd(directory,
                            ['config', 'user.email', '*****@*****.**'])
            run_git_cmd(directory, ['add', '-f', '.'])
            run_git_cmd(directory, ['commit', '-m', 'Upstream version'])
            run_git_cmd(directory, ['checkout', '-b', 'upstream'])
            run_git_cmd(directory, ['checkout', 'master'])
        except:  # pylint: disable=bare-except
            # If git setup fails or is interrupted then remove the partially
            # initialized repository.
            util.remove_tree(git_dir)
Example #22
def init_git_repo(directory):
  """Initialize the source git repository for a given package directory.

  This function works for unpacked tar files as well as cloned git
  repositories.  It sets up an 'upstream' branch pointing at the
  pristine upstream sources and a 'master' branch containing changes
  specific to webports (normally the result of applying nacl.patch).

  Args:
    directory: Directory containing unpacked package sources.
  """
  git_dir = os.path.join(directory, '.git')

  # If the upstream ref exists then we've already initialized this repo
  if os.path.exists(os.path.join(git_dir, 'refs', 'heads', 'upstream')):
    return

  if os.path.exists(git_dir):
    log('Init existing git repo: %s' % directory)
    run_git_cmd(directory, ['checkout', '-b', 'placeholder'])
    run_git_cmd(directory, ['branch', '-D', 'upstream'], error_ok=True)
    run_git_cmd(directory, ['branch', '-D', 'master'], error_ok=True)
    run_git_cmd(directory, ['checkout', '-b', 'upstream'])
    run_git_cmd(directory, ['checkout', '-b', 'master'])
    run_git_cmd(directory, ['branch', '-D', 'placeholder'])
  else:
    log('Init new git repo: %s' % directory)
    run_git_cmd(directory, ['init'])
    try:
      # Setup a bogus identity on the buildbots.
      if os.environ.get('BUILDBOT_BUILDERNAME'):
        run_git_cmd(directory, ['config', 'user.name', 'Naclports'])
        run_git_cmd(directory, ['config', 'user.email', '*****@*****.**'])
      run_git_cmd(directory, ['add', '-f', '.'])
      run_git_cmd(directory, ['commit', '-m', 'Upstream version'])
      run_git_cmd(directory, ['checkout', '-b', 'upstream'])
      run_git_cmd(directory, ['checkout', 'master'])
    except:  # pylint: disable=bare-except
      # If git setup fails or is interrupted then remove the partially
      # initialized repository.
      util.remove_tree(git_dir)
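
The net effect, in both branches of the function above, is the same
two-branch layout (a comment-only summary):

# After init_git_repo(directory):
#   upstream - pristine sources as unpacked or cloned
#   master   - upstream plus webports-specific changes (nacl.patch)
# Running 'git diff upstream' from master then regenerates the patch,
# which is exactly what update_patch() in Examples #26/#28 relies on.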
Example #23
def download_files(files, check_hashes=True, parallel=False):
    """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
    files_to_download = []
    filenames = []
    download_dir = webports.package_index.PREBUILT_ROOT
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for file_info in files:
        basename = os.path.basename(file_info.url)
        file_info.name = os.path.join(download_dir, basename)
        filenames.append((file_info.name, file_info.url))
        if os.path.exists(file_info.name):
            if not check_hashes or check_hash(file_info.name, file_info.md5):
                log('Up-to-date: %s' % file_info.name)
                continue
        files_to_download.append(file_info)

    def check(file_info):
        if check_hashes and not check_hash(file_info.name, file_info.md5):
            raise webports.Error(
                'Checksum failed: %s\nExpected=%s\nActual=%s' %
                (file_info.name, file_info.md5, get_hash(file_info.name)))

    if not files_to_download:
        log('All files up-to-date')
    else:
        total_size = sum(f.size for f in files_to_download)
        log('Need to download %d/%d files [%s]' %
            (len(files_to_download), len(files), format_size(total_size)))

        gsutil = find_gsutil()
        if parallel:
            remaining_files = files_to_download
            num_files = 20
            while remaining_files:
                files = remaining_files[:num_files]
                remaining_files = remaining_files[num_files:]
                cmd = (gsutil + ['-m', 'cp'] +
                       [f.gsurl for f in files] + [download_dir])
                log_verbose(cmd)
                subprocess.check_call(cmd)
                for file_info in files:
                    check(file_info)
        else:
            for file_info in files_to_download:
                webports.download_file(file_info.name, file_info.url)
                check(file_info)

    return filenames
Example #24
def download_files(files, check_hashes=True, parallel=False):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = webports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    file_info.name = os.path.join(download_dir, basename)
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or check_hash(file_info.name, file_info.md5):
        log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(file_info)

  def check(file_info):
    if check_hashes and not check_hash(file_info.name, file_info.md5):
      raise webports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.name, file_info.md5, get_hash(file_info.name)))

  if not files_to_download:
    log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), format_size(total_size)))

    gsutil = find_gsutil()
    if parallel:
      remaining_files = files_to_download
      num_files = 20
      while remaining_files:
        files = remaining_files[:num_files]
        remaining_files = remaining_files[num_files:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in files] + [download_dir]
        log_verbose(cmd)
        subprocess.check_call(cmd)
        for file_info in files:
          check(file_info)
    else:
      for file_info in files_to_download:
        webports.download_file(file_info.name, file_info.url)
        check(file_info)

  return filenames
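
get_hash and check_hash are used above but not shown. A plausible sketch
using MD5, matching the file_info.md5 fields (the real helpers may stream
large files rather than reading them whole):

import hashlib

def get_hash(filename):
  """Return the hex MD5 digest of filename."""
  with open(filename, 'rb') as f:
    return hashlib.md5(f.read()).hexdigest()

def check_hash(filename, md5sum):
  """Return True if filename's MD5 digest equals md5sum."""
  return get_hash(filename) == md5sum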
Example #25
  def git_clone_to_mirror(self):
    """Clone the upstream git repo into a local mirror. """
    git_url, git_commit = self.URL.split('@', 2)

    # Clone upstream git repo into local mirror, or update the existing
    # mirror.
    git_mirror = git_url.split('://', 2)[1]
    git_mirror = git_mirror.replace('/', '_')
    mirror_dir = os.path.join(paths.CACHE_ROOT, git_mirror)
    if os.path.exists(mirror_dir):
      if run_git_cmd(mirror_dir, ['rev-parse', git_commit + '^{commit}'],
                     error_ok=True) != 0:
        log('Updating git mirror: %s' % util.rel_path(mirror_dir))
        run_git_cmd(mirror_dir, ['remote', 'update', '--prune'])
    else:
      log('Mirroring upstream git repo: %s' % self.URL)
      run_git_cmd(paths.CACHE_ROOT, ['clone', '--mirror', git_url, git_mirror])
    log('git mirror up-to-date: %s' % util.rel_path(mirror_dir))
    return mirror_dir, git_commit
Example #26
    def update_patch(self):
        if self.URL is None:
            return

        git_dir = self.get_build_location()
        if not os.path.exists(git_dir):
            raise Error('Source directory not found: %s' % git_dir)

        try:
            diff = subprocess.check_output(
                ['git', 'diff', 'upstream', '--no-ext-diff'], cwd=git_dir)
        except subprocess.CalledProcessError as e:
            raise Error('error running git in %s: %s' % (git_dir, str(e)))

        # Drop index lines for a more stable diff.
        diff = re.sub('\nindex [^\n]+\n', '\n', diff)

        # Drop binary files, as they don't work anyhow.
        diff = re.sub(
            'diff [^\n]+\n'
            '(new file [^\n]+\n)?'
            '(deleted file mode [^\n]+\n)?'
            'Binary files [^\n]+ differ\n', '', diff)

        # Always filter out config.sub changes
        diff_skip = ['*config.sub']

        # Add optional per-port skip list.
        diff_skip_file = os.path.join(self.root, 'diff_skip.txt')
        if os.path.exists(diff_skip_file):
            with open(diff_skip_file) as f:
                diff_skip += f.read().splitlines()

        new_diff = ''
        skipping = False
        for line in diff.splitlines():
            if line.startswith('diff --git a/'):
                filename = line[len('diff --git a/'):].split()[0]
                skipping = False
                for skip in diff_skip:
                    if fnmatch.fnmatch(filename, skip):
                        skipping = True
                        break
            if not skipping:
                new_diff += line + '\n'
        diff = new_diff

        # Write back out the diff.
        patch_path = self.get_patch_file()
        preexisting = os.path.exists(patch_path)

        if not diff:
            if preexisting:
                log('removing patch file: %s' % util.rel_path(patch_path))
                os.remove(patch_path)
            else:
                log('no patch required: %s' % util.rel_path(git_dir))
            return

        if preexisting:
            with open(patch_path) as f:
                if diff == f.read():
                    log('patch unchanged: %s' % util.rel_path(patch_path))
                    return

        with open(patch_path, 'w') as f:
            f.write(diff)

        if preexisting:
            log('updated patch: %s' % util.rel_path(patch_path))
        else:
            log('created patch: %s' % util.rel_path(patch_path))
Example #27
  def log(self, message):
    log('%s: %s' % (message, self.info_string()))
Example #28
  def update_patch(self):
    if self.URL is None:
      return

    git_dir = self.get_build_location()
    if not os.path.exists(git_dir):
      raise Error('Source directory not found: %s' % git_dir)

    try:
      diff = subprocess.check_output(
          ['git', 'diff', 'upstream', '--no-ext-diff'], cwd=git_dir)
    except subprocess.CalledProcessError as e:
      raise Error('error running git in %s: %s' % (git_dir, str(e)))

    # Drop index lines for a more stable diff.
    diff = re.sub('\nindex [^\n]+\n', '\n', diff)

    # Drop binary files, as they don't work anyhow.
    diff = re.sub('diff [^\n]+\n'
                  '(new file [^\n]+\n)?'
                  '(deleted file mode [^\n]+\n)?'
                  'Binary files [^\n]+ differ\n', '', diff)

    # Always filter out config.sub changes
    diff_skip = ['*config.sub']

    # Add optional per-port skip list.
    diff_skip_file = os.path.join(self.root, 'diff_skip.txt')
    if os.path.exists(diff_skip_file):
      with open(diff_skip_file) as f:
        diff_skip += f.read().splitlines()

    new_diff = ''
    skipping = False
    for line in diff.splitlines():
      if line.startswith('diff --git a/'):
        filename = line[len('diff --git a/'):].split()[0]
        skipping = False
        for skip in diff_skip:
          if fnmatch.fnmatch(filename, skip):
            skipping = True
            break
      if not skipping:
        new_diff += line + '\n'
    diff = new_diff

    # Write back out the diff.
    patch_path = self.get_patch_file()
    preexisting = os.path.exists(patch_path)

    if not diff:
      if preexisting:
        log('removing patch file: %s' % util.rel_path(patch_path))
        os.remove(patch_path)
      else:
        log('no patch required: %s' % util.rel_path(git_dir))
      return

    if preexisting:
      with open(patch_path) as f:
        if diff == f.read():
          log('patch unchanged: %s' % util.rel_path(patch_path))
          return

    with open(patch_path, 'w') as f:
      f.write(diff)

    if preexisting:
      log('updated patch: %s' % util.rel_path(patch_path))
    else:
      log('created patch: %s' % util.rel_path(patch_path))
Example #29
def main(args):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='webports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-p',
                        '--parallel',
                        action='store_true',
                        help='Download packages in parallel.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cached output of gsutil -L (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        webports.set_verbose(True)

    sdk_version = webports.util.get_sdk_version()
    log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/packages' % (
        webports.GS_BUCKET, sdk_version, args.revision)
    gs_url = 'gs://' + base_path + '/*'
    listing_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'listing.txt')

    if args.cache_listing and os.path.exists(listing_file):
        log('Using pre-cached gs listing: %s' % listing_file)
        with open(listing_file) as f:
            listing = f.read()
    else:
        log('Searching for packages at: %s' % gs_url)
        cmd = find_gsutil() + ['stat', gs_url]
        log_verbose('Running: %s' % str(cmd))
        try:
            listing = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            raise webports.Error("Command '%s' failed: %s" % (cmd, e))
        if args.cache_listing:
            with open(listing_file, 'w') as f:
                f.write(listing)

    all_files = parse_gs_util_output(listing)

    log('Found %d packages [%s]' %
        (len(all_files), format_size(sum(f.size for f in all_files))))

    binaries = download_files(all_files, not args.skip_md5, args.parallel)
    index_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
    log('Generating %s' % index_file)
    webports.package_index.write_index(index_file, binaries)
    log('Done')
    return 0
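
format_size is assumed by both main() variants above. A simple sketch of the
expected behavior; the units and precision shown are assumptions:

def format_size(num_bytes):
    """Render a byte count using human-readable units."""
    size = float(num_bytes)
    for unit in ('B', 'KB', 'MB', 'GB'):
        if size < 1024:
            return '%.1f %s' % (size, unit)
        size /= 1024
    return '%.1f TB' % size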
Example #30
def rmtree(path):
  util.log('removing %s' % path)
  util.remove_tree(path)
Example #31
def rmtree(path):
    util.log('removing %s' % path)
    util.remove_tree(path)
Example #32
  def log(self, message):
    log('%s: %s' % (message, self.info_string()))