コード例 #1
0
  def CreatePkgFile(self):
    """Create a pkg file for use with the FreeBSD pkg tool.

    Create a package from the result of the package's InstallStep.
    """
    install_dir = self.GetInstallLocation()
    if not os.path.exists(install_dir):
      # Fixed typo in log message: 'Skiping' -> 'Skipping'.
      Log('Skipping pkg creation. Install dir not found: %s' % install_dir)
      return

    # Strip all elf or pexe files in the install directory (except .o files
    # since we don't want to strip, for example, crt1.o)
    if not self.config.debug and self.config.toolchain != 'emscripten':
      strip = util.GetStrip(self.config)
      for root, _, files in os.walk(install_dir):
        for filename in files:
          fullname = os.path.join(root, filename)
          if (os.path.isfile(fullname) and util.IsElfFile(fullname)
              and os.path.splitext(fullname)[1] != '.o'):
            Log('stripping: %s %s' % (strip, fullname))
            subprocess.check_call([strip, fullname])

    abi = 'pkg_' + self.config.toolchain
    if self.config.arch != self.config.toolchain:
      abi += '_' + util.arch_to_pkgarch[self.config.arch]
    abi_dir = os.path.join(paths.PUBLISH_ROOT, abi)
    pkg_file = os.path.join(abi_dir, '%s-%s.tbz' % (self.NAME, self.VERSION))
    util.Makedirs(abi_dir)
    # Only glibc packages carry dependency information; other toolchains
    # get an empty dependency list.
    deps = self.DEPENDS if self.config.toolchain == 'glibc' else []
    # Reuse install_dir rather than calling GetInstallLocation() a
    # second time.
    bsd_pkg.CreatePkgFile(self.NAME, self.VERSION, self.config.arch,
                          install_dir, pkg_file, deps)
コード例 #2
0
ファイル: scan_packages.py プロジェクト: pvk84/naclports
def DownloadFiles(files, check_hashes=True, parallel=False):
  """Download one or more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
        hash otherwise always check the hashes match the ones in the
        FileInfo objects.
    parallel: When True download files in batches of up to 20 using
        'gsutil -m cp' rather than downloading them one at a time.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = naclports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    file_info.name = os.path.join(download_dir, basename)
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or CheckHash(file_info.name, file_info.md5):
        Log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(file_info)

  def Check(file_info):
    # Raise naclports.Error if the downloaded file fails its hash check.
    if check_hashes and not CheckHash(file_info.name, file_info.md5):
      raise naclports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.name, file_info.md5, GetHash(file_info.name)))

  if not files_to_download:
    Log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    Log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), FormatSize(total_size)))

    gsutil = FindGsutil()
    if parallel:
      remaining_files = files_to_download
      # Download in bounded batches so the gsutil command line stays a
      # reasonable length.  Use a distinct name ('batch') instead of
      # reassigning the 'files' parameter as the original did.
      batch_size = 20
      while remaining_files:
        batch = remaining_files[:batch_size]
        remaining_files = remaining_files[batch_size:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in batch] + [download_dir]
        LogVerbose(cmd)
        subprocess.check_call(cmd)
        for file_info in batch:
          Check(file_info)
    else:
      for file_info in files_to_download:
        naclports.DownloadFile(file_info.name, file_info.url)
        Check(file_info)

  return filenames
コード例 #3
0
    def Clean(self):
        """Remove this package's built package file and its stamp directory."""
        package_file = self.PackageFile()
        Log('removing %s' % package_file)
        if os.path.exists(package_file):
            os.remove(package_file)

        stamp_root = os.path.join(paths.STAMP_DIR, self.NAME)
        Log('removing %s' % stamp_root)
        util.RemoveTree(stamp_root)
コード例 #4
0
ファイル: package.py プロジェクト: protonpopsicle/Webports
    def CheckDeps(self, valid_packages):
        """Return True if all DEPENDS and CONFLICTS name valid packages.

        Logs and returns False on the first name not present in
        valid_packages.
        """
        bad_deps = [p for p in self.DEPENDS if p not in valid_packages]
        if bad_deps:
            Log('%s: Invalid dependency: %s' % (self.info, bad_deps[0]))
            return False

        bad_conflicts = [p for p in self.CONFLICTS if p not in valid_packages]
        if bad_conflicts:
            Log('%s: Invalid conflict: %s' % (self.info, bad_conflicts[0]))
            return False

        return True
コード例 #5
0
def RunGitCmd(directory, cmd, error_ok=False):
    """Run a git command in the given directory and return its exit code.

    Stdout and stderr are captured; on a non-zero exit (unless error_ok
    is set) both streams are logged and Error is raised.
    """
    full_cmd = ['git'] + cmd
    Trace('%s' % ' '.join(full_cmd))
    proc = subprocess.Popen(full_cmd,
                            cwd=directory,
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0 and not error_ok:
        for stream in (stdout, stderr):
            if stream:
                Log(stream)
        raise Error('git command failed: %s' % full_cmd)
    return proc.returncode
コード例 #6
0
    def Patch(self):
        """Apply this package's patch file to its source tree (idempotent).

        If the patch stamp already exists, only 'git clean' is run to
        restore a pristine tree; otherwise the source is turned into a
        git repo, patched, and committed, then the stamp is written.
        """
        stamp_file = os.path.join(self.GetStampDir(), 'nacl_patch')
        src_dir = self.GetBuildLocation()
        if self.URL is None:
            return

        if os.path.exists(stamp_file):
            self.Log('Skipping patch step (cleaning source tree)')
            clean_cmd = ['git', 'clean', '-f', '-d']
            if not util.verbose:
                clean_cmd.append('-q')
            self.RunCmd(clean_cmd)
            return

        util.LogHeading('Patching')
        Log('Init git repo: %s' % src_dir)
        try:
            InitGitRepo(src_dir)
        except subprocess.CalledProcessError as e:
            raise Error(e)

        patch_file = self.GetPatchFile()
        if os.path.exists(patch_file):
            Trace('applying patch to: %s' % src_dir)
            patch_cmd = ['patch', '-p1', '-g0', '--no-backup-if-mismatch']
            with open(patch_file) as f:
                self.RunCmd(patch_cmd, stdin=f)
            self.RunCmd(['git', 'add', '.'])
            self.RunCmd(['git', 'commit', '-m', 'Apply naclports patch'])

        WriteStamp(stamp_file, '')
コード例 #7
0
def WriteStamp(stamp_file, stamp_contents):
    """Write a stamp file to disk with the given file contents."""
    # Make sure the parent directory exists before writing.
    util.Makedirs(os.path.dirname(stamp_file))

    with open(stamp_file, 'w') as stamp:
        stamp.write(stamp_contents)
    Log('Wrote stamp: %s' % stamp_file)
コード例 #8
0
  def GitCloneToMirror(self):
    """Clone the upstream git repo into a local mirror.

    Returns:
      Tuple of (mirror_dir, git_commit).
    """
    # Split on the LAST '@' only.  The original used split('@', 2),
    # which raises ValueError on unpack if the URL contains more than
    # one '@' (e.g. userinfo in the URL); the commit-ish is always the
    # part after the final '@'.
    git_url, git_commit = self.URL.rsplit('@', 1)

    # Clone upstream git repo into local mirror, or update the existing
    # mirror.
    git_mirror = git_url.split('://', 1)[1]
    git_mirror = git_mirror.replace('/', '_')
    mirror_dir = os.path.join(paths.CACHE_ROOT, git_mirror)
    if os.path.exists(mirror_dir):
      # Only hit the network if the mirror doesn't already contain the
      # wanted commit.
      if RunGitCmd(mirror_dir, ['rev-parse', git_commit + '^{commit}'],
                   error_ok=True) != 0:
        Log('Updating git mirror: %s' % util.RelPath(mirror_dir))
        RunGitCmd(mirror_dir, ['remote', 'update', '--prune'])
    else:
      Log('Mirroring upstream git repo: %s' % self.URL)
      RunGitCmd(paths.CACHE_ROOT, ['clone', '--mirror', git_url, git_mirror])
    Log('git mirror up-to-date: %s' % util.RelPath(mirror_dir))
    return mirror_dir, git_commit
コード例 #9
0
def InitGitRepo(directory):
    """Initialize the source git repository for a given package directory.

    This function works for unpacked tar files as well as cloned git
    repositories.  It sets up an 'upstream' branch pointing at the
    pristine upstream sources and a 'master' branch that will contain
    changes specific to naclports (normally the result of applying
    nacl.patch).

    Args:
      directory: Directory containing unpacked package sources.
    """
    git_dir = os.path.join(directory, '.git')

    # If the upstream ref exists then we've already initialized this repo
    if os.path.exists(os.path.join(git_dir, 'refs', 'heads', 'upstream')):
        return

    if os.path.exists(git_dir):
        Log('Init existing git repo: %s' % directory)
        RunGitCmd(directory, ['checkout', '-b', 'placeholder'])
        RunGitCmd(directory, ['branch', '-D', 'upstream'], error_ok=True)
        RunGitCmd(directory, ['branch', '-D', 'master'], error_ok=True)
        RunGitCmd(directory, ['checkout', '-b', 'upstream'])
        RunGitCmd(directory, ['checkout', '-b', 'master'])
        RunGitCmd(directory, ['branch', '-D', 'placeholder'])
    else:
        Log('Init new git repo: %s' % directory)
        RunGitCmd(directory, ['init'])
        try:
            # Setup a bogus identity on the buildbots.
            if os.environ.get('BUILDBOT_BUILDERNAME'):
                RunGitCmd(directory, ['config', 'user.name', 'Naclports'])
                RunGitCmd(directory,
                          ['config', 'user.email', '*****@*****.**'])
            RunGitCmd(directory, ['add', '-f', '.'])
            RunGitCmd(directory, ['commit', '-m', 'Upstream version'])
            RunGitCmd(directory, ['checkout', '-b', 'upstream'])
            RunGitCmd(directory, ['checkout', 'master'])
        except:  # pylint: disable=bare-except
            # If git setup fails or is interrupted then remove the partially
            # initialized repository.
            util.RemoveTree(git_dir)
            # Re-raise: the original swallowed the exception (including
            # KeyboardInterrupt), leaving callers to believe the repo was
            # initialized successfully.
            raise
コード例 #10
0
ファイル: scan_packages.py プロジェクト: hjrhjr/naclports
def DownloadFiles(files, check_hashes=True):
    """Download one or more files to the local disk.

    Args:
      files: List of FileInfo objects to download.
      check_hashes: When False assume local files have the correct
          hash otherwise always check the hashes match the ones in the
          FileInfo objects.

    Returns:
      List of (filename, url) tuples.
    """
    files_to_download = []
    filenames = []
    download_dir = naclports.package_index.PREBUILT_ROOT
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for file_info in files:
        basename = os.path.basename(file_info.url)
        fullname = os.path.join(download_dir, basename)
        filenames.append((fullname, file_info.url))
        if os.path.exists(fullname):
            if not check_hashes or CheckHash(fullname, file_info.etag):
                # Log the local path that was actually checked (the
                # original logged file_info.name, which is the remote
                # entry, not the verified local file).
                Log('Up-to-date: %s' % fullname)
                continue
        files_to_download.append(
            FileInfo(fullname, file_info.size, file_info.url, file_info.etag))

    if not files_to_download:
        Log('All files up-to-date')
    else:
        # Use the named 'size' field rather than positional f[1], matching
        # how FileInfo is consumed elsewhere.
        total_size = sum(f.size for f in files_to_download)
        Log('Need to download %d/%d files [%s]' %
            (len(files_to_download), len(files), FormatSize(total_size)))

        for file_info in files_to_download:
            naclports.DownloadFile(file_info.name, file_info.url)
            if check_hashes and not CheckHash(file_info.name, file_info.etag):
                raise naclports.Error('Checksum failed: %s' % file_info.name)

    return filenames
コード例 #11
0
ファイル: scan_packages.py プロジェクト: hjrhjr/naclports
def main(args):
    """Scan prebuilt packages in GS, download them, and write an index.

    Returns 0 on success, 1 if the gs listing could not be obtained.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='naclports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cached output of gsutil -le (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        naclports.SetVerbose(True)

    sdk_version = naclports.util.GetSDKVersion()
    Log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/packages' % (
        naclports.GS_BUCKET, sdk_version, args.revision)
    gs_url = 'gs://' + base_path
    gsutil = naclports.util.FindInPath('gsutil.py')
    listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'listing.txt')
    if args.cache_listing and os.path.exists(listing_file):
        Log('Using pre-cached gs listing: %s' % listing_file)
        with open(listing_file) as f:
            listing = f.read()
    else:
        Log('Searching for packages at: %s' % gs_url)
        cmd = [sys.executable, gsutil, 'ls', '-le', gs_url]
        LogVerbose('Running: %s' % str(cmd))
        try:
            listing = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            # The original constructed naclports.Error(e) without raising
            # it, silently discarding the failure.  Log the error and keep
            # the non-zero exit code.
            Log("Command '%s' failed: %s" % (cmd, e))
            return 1

    all_files = ParseGsUtilLs(listing)
    if args.cache_listing and not os.path.exists(listing_file):
        with open(listing_file, 'w') as f:
            f.write(listing)

    Log('Found %d packages [%s]' %
        (len(all_files), FormatSize(sum(f.size for f in all_files))))

    binaries = DownloadFiles(all_files, not args.skip_md5)
    index_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
    Log('Generating %s' % index_file)
    naclports.package_index.WriteIndex(index_file, binaries)
    Log('Done')
    return 0
コード例 #12
0
    def Extract(self):
        """Extract the package archive into its build location.

        This method assumes the package has already been downloaded.
        """
        if self.IsGitUpstream():
            self.GitClone()
            return

        archive = self.DownloadLocation()
        if not archive:
            self.Log('Skipping extract; No upstream archive')
            return

        dest = self.GetBuildLocation()
        parent_dir, folder_name = os.path.split(dest)
        util.Makedirs(parent_dir)

        # Compare against the existing stamp to decide whether any work
        # is actually needed.
        stamp_file = self.GetExtractStamp()
        stamp_contents = self.GetExtractStampContent()
        if os.path.exists(dest):
            if StampContentsMatch(stamp_file, stamp_contents):
                Log('Already up-to-date: %s' % util.RelPath(dest))
                return
            raise Error("Upstream archive or patch has changed.\n" +
                        "Please remove existing checkout and try again: '%s'" %
                        dest)

        util.LogHeading('Extracting')
        util.Makedirs(paths.OUT_DIR)
        # Extract into a fresh temporary directory, then rename the result
        # into place; the temp dir is always cleaned up.
        staging_dir = tempfile.mkdtemp(dir=paths.OUT_DIR)
        try:
            ExtractArchive(archive, staging_dir)
            extracted = os.path.join(staging_dir, folder_name)
            if not os.path.isdir(extracted):
                raise Error('Archive contents not found: %s' % extracted)
            LogVerbose("renaming '%s' -> '%s'" % (extracted, dest))
            os.rename(extracted, dest)
        finally:
            util.RemoveTree(staging_dir)

        self.RemoveStamps()
        WriteStamp(stamp_file, stamp_contents)
コード例 #13
0
    def Download(self, force_mirror=None):
        """Download upstream sources and verify integrity."""
        # Git upstreams are handled entirely by the mirror logic.
        if self.IsGitUpstream():
            self.GitCloneToMirror()
            return

        archive = self.DownloadLocation()
        if not archive:
            return

        # Default mirror behaviour comes from the environment.
        if force_mirror is None:
            force_mirror = os.environ.get('FORCE_MIRROR', False)
        self.DownloadArchive(force_mirror=force_mirror)

        if self.SHA1 is None:
            raise PkgFormatError('missing SHA1 attribute: %s' % self.info)

        util.VerifyHash(archive, self.SHA1)
        Log('verified: %s' % util.RelPath(archive))
コード例 #14
0
    def UpdatePatch(self):
        """Regenerate this package's patch file from the git diff vs upstream.

        Diffs the source tree against the 'upstream' branch, normalizes
        the diff, filters entries listed in diff_skip.txt, and writes
        (or removes) the patch file.

        Raises:
          Error: If the source directory is missing or git fails.
        """
        if self.URL is None:
            return

        git_dir = self.GetBuildLocation()
        if not os.path.exists(git_dir):
            raise Error('Source directory not found: %s' % git_dir)

        try:
            diff = subprocess.check_output(
                ['git', 'diff', 'upstream', '--no-ext-diff'], cwd=git_dir)
        except subprocess.CalledProcessError as e:
            raise Error('error running git in %s: %s' % (git_dir, str(e)))

        # Drop index lines for a more stable diff.
        diff = re.sub('\nindex [^\n]+\n', '\n', diff)

        # Drop binary files, as they don't work anyhow.
        diff = re.sub(
            'diff [^\n]+\n'
            '(new file [^\n]+\n)?'
            '(deleted file mode [^\n]+\n)?'
            'Binary files [^\n]+ differ\n', '', diff)

        # Filter out things from an optional per port skip list.
        diff_skip = os.path.join(self.root, 'diff_skip.txt')
        if os.path.exists(diff_skip):
            names = open(diff_skip).read().splitlines()
            new_diff = ''
            skipping = False
            for line in diff.splitlines():
                if line.startswith('diff --git '):
                    skipping = False
                    for name in names:
                        if line == 'diff --git a/%s b/%s' % (name, name):
                            skipping = True
                if not skipping:
                    new_diff += line + '\n'
            diff = new_diff

        # Write back out the diff.
        patch_path = self.GetPatchFile()
        preexisting = os.path.exists(patch_path)

        if not diff:
            if preexisting:
                Log('removing patch file: %s' % util.RelPath(patch_path))
                os.remove(patch_path)
            else:
                Log('no patch required: %s' % util.RelPath(git_dir))
            return

        if preexisting:
            with open(patch_path) as f:
                if diff == f.read():
                    Log('patch unchanged: %s' % util.RelPath(patch_path))
                    return

        with open(patch_path, 'w') as f:
            f.write(diff)

        # Bug fix: the original had these two messages swapped, logging
        # 'created' for a pre-existing patch and 'updated' for a new one.
        if preexisting:
            Log('updated patch: %s' % util.RelPath(patch_path))
        else:
            Log('created patch: %s' % util.RelPath(patch_path))
コード例 #15
0
 def Log(self, message):
     """Log the given message annotated with this package's info string."""
     annotated = '%s: %s' % (message, self.InfoString())
     Log(annotated)
コード例 #16
0
def main(args):
    """Scan published packages in GS, download them per pkg directory.

    Returns 0 on success; raises naclports.Error on gsutil failure.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='naclports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-p',
                        '--parallel',
                        action='store_true',
                        help='Download packages in parallel.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cached output of gsutil -L (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        naclports.SetVerbose(True)

    sdk_version = naclports.util.GetSDKVersion()
    Log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
    base_path = '%s/builds/pepper_%s/%s/publish' % (naclports.GS_BUCKET,
                                                    sdk_version, args.revision)
    gs_base_url = 'gs://' + base_path
    cmd = FindGsutil() + ['ls', gs_base_url]
    LogVerbose('Running: %s' % str(cmd))
    try:
        all_published = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        raise naclports.Error("Command '%s' failed: %s" % (cmd, e))

    pkg_dir = re.findall(r'pkg_[\w-]+', all_published)
    for pkg in pkg_dir:
        listing_file = os.path.join(naclports.NACLPORTS_ROOT, 'lib',
                                    pkg + '_' + 'listing.txt')
        if args.cache_listing and os.path.exists(listing_file):
            Log('Using pre-cached gs listing: %s' % listing_file)
            with open(listing_file) as f:
                listing = f.read()
        else:
            gs_url = gs_base_url + '/' + pkg + '/*'
            Log('Searching for packages at: %s' % gs_url)
            cmd = FindGsutil() + ['stat', gs_url]
            LogVerbose('Running: %s' % str(cmd))
            try:
                listing = subprocess.check_output(cmd)
            except subprocess.CalledProcessError as e:
                raise naclports.Error("Command '%s' failed: %s" % (cmd, e))
            if args.cache_listing:
                with open(listing_file, 'w') as f:
                    f.write(listing)
        all_files = ParseGsUtilOutput(listing)
        Log('Found %d packages [%s] for %s' %
            (len(all_files), FormatSize(sum(f.size for f in all_files)), pkg))
        DownloadFiles(pkg, all_files, not args.skip_md5, args.parallel)
    Log('Done')
    return 0