def CreatePkgFile(self):
        """Create and pkg file for use with the FreeBSD pkg tool.

    Create a package from the result of the package's InstallStep.
    """
        install_dir = self.GetInstallLocation()
        if not os.path.exists(install_dir):
            Log('Skipping pkg creation. Install dir not found: %s' %
                install_dir)
            return

        # Strip all ELF and PEXE files in the install directory (except .o
        # files, since we don't want to strip, for example, crt1.o).
        if not self.config.debug and self.config.toolchain != 'emscripten':
            strip = util.GetStrip(self.config)
            for root, _, files in os.walk(install_dir):
                for filename in files:
                    fullname = os.path.join(root, filename)
                    if (os.path.isfile(fullname) and util.IsElfFile(fullname)
                            and os.path.splitext(fullname)[1] != '.o'):
                        Log('stripping: %s %s' % (strip, fullname))
                        subprocess.check_call([strip, fullname])

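        # Packages are published into a per-toolchain directory (e.g.
        # pkg_glibc); when the architecture differs from the toolchain name
        # the pkg architecture is appended as well.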
        abi = 'pkg_' + self.config.toolchain
        if self.config.arch != self.config.toolchain:
            abi += "_" + util.arch_to_pkgarch[self.config.arch]
        abi_dir = os.path.join(paths.PUBLISH_ROOT, abi)
        pkg_file = os.path.join(abi_dir,
                                '%s-%s.tbz' % (self.NAME, self.VERSION))
        util.Makedirs(abi_dir)
        deps = self.DEPENDS
        if self.config.toolchain != 'glibc':
            deps = []
        bsd_pkg.CreatePkgFile(self.NAME, self.VERSION, self.config.arch,
                              install_dir, pkg_file, deps)
def DownloadFiles(files, check_hashes=True, parallel=False):
    """Download one of more files to the local disk.

    Args:
      files: List of FileInfo objects to download.
      check_hashes: When False, assume local files have the correct hash;
        otherwise always check that the hashes match the ones in the
        FileInfo objects.
      parallel: When True, download files in parallel batches using gsutil.

    Returns:
      List of (filename, url) tuples.
    """
    files_to_download = []
    filenames = []
    download_dir = webports.package_index.PREBUILT_ROOT
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for file_info in files:
        basename = os.path.basename(file_info.url)
        file_info.name = os.path.join(download_dir, basename)
        filenames.append((file_info.name, file_info.url))
        if os.path.exists(file_info.name):
            if not check_hashes or CheckHash(file_info.name, file_info.md5):
                Log('Up-to-date: %s' % file_info.name)
                continue
        files_to_download.append(file_info)

    def Check(file_info):
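        """Raise webports.Error if a downloaded file fails its hash check."""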
        if check_hashes and not CheckHash(file_info.name, file_info.md5):
            raise webports.Error(
                'Checksum failed: %s\nExpected=%s\nActual=%s' %
                (file_info.name, file_info.md5, GetHash(file_info.name)))

    if not files_to_download:
        Log('All files up-to-date')
    else:
        total_size = sum(f.size for f in files_to_download)
        Log('Need to download %d/%d files [%s]' %
            (len(files_to_download), len(files), FormatSize(total_size)))

        gsutil = FindGsutil()
        if parallel:
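            # Fetch in batches using gsutil's parallel (-m) copy mode,
            # verifying each file in the batch after it completes.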
            remaining_files = files_to_download
            num_files = 20
            while remaining_files:
                batch = remaining_files[:num_files]
                remaining_files = remaining_files[num_files:]
                cmd = (gsutil + ['-m', 'cp'] + [f.gsurl for f in batch] +
                       [download_dir])
                LogVerbose(' '.join(cmd))
                subprocess.check_call(cmd)
                for file_info in batch:
                    Check(file_info)
        else:
            for file_info in files_to_download:
                webports.DownloadFile(file_info.name, file_info.url)
                Check(file_info)

    return filenames
    def Clean(self):
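        """Remove this package's binary package file and stamp directory."""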
        pkg = self.PackageFile()
        Log('removing %s' % pkg)
        if os.path.exists(pkg):
            os.remove(pkg)

        stamp_dir = os.path.join(paths.STAMP_DIR, self.NAME)
        Log('removing %s' % stamp_dir)
        util.RemoveTree(stamp_dir)
def RunGitCmd(directory, cmd, error_ok=False):
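    """Run a git command in the given directory.

    Args:
      directory: Directory in which to run the command.
      cmd: The git command line, without the leading 'git'.
      error_ok: If True, do not raise an Error on non-zero exit status.

    Returns:
      The exit status of the git command.
    """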
    cmd = ['git'] + cmd
    LogVerbose(' '.join(cmd))
    p = subprocess.Popen(cmd,
                         cwd=directory,
                         stderr=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if not error_ok and p.returncode != 0:
        if stdout:
            Log(stdout)
        if stderr:
            Log(stderr)
        raise Error('git command failed: %s' % cmd)
    Trace('git exited with %d' % p.returncode)
    return p.returncode
def WriteStamp(stamp_file, stamp_contents):
    """Write a stamp file to disk with the given file contents."""
    stamp_dir = os.path.dirname(stamp_file)
    util.Makedirs(stamp_dir)

    with open(stamp_file, 'w') as f:
        f.write(stamp_contents)
    Log('Wrote stamp: %s' % stamp_file)
def InitGitRepo(directory):
    """Initialize the source git repository for a given package directory.

    This function works for unpacked tar files as well as cloned git
    repositories.  It sets up an 'upstream' branch pointing at the
    pristine upstream sources and a 'master' branch containing changes
    specific to webports (normally the result of applying nacl.patch).

    Args:
      directory: Directory containing unpacked package sources.
    """
    git_dir = os.path.join(directory, '.git')

    # If the upstream ref exists then we've already initialized this repo
    if os.path.exists(os.path.join(git_dir, 'refs', 'heads', 'upstream')):
        return

    if os.path.exists(git_dir):
        Log('Init existing git repo: %s' % directory)
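        # Use a temporary 'placeholder' branch so that 'upstream' and
        # 'master' can be deleted and recreated even if one of them is
        # currently checked out.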
        RunGitCmd(directory, ['checkout', '-b', 'placeholder'])
        RunGitCmd(directory, ['branch', '-D', 'upstream'], error_ok=True)
        RunGitCmd(directory, ['branch', '-D', 'master'], error_ok=True)
        RunGitCmd(directory, ['checkout', '-b', 'upstream'])
        RunGitCmd(directory, ['checkout', '-b', 'master'])
        RunGitCmd(directory, ['branch', '-D', 'placeholder'])
    else:
        Log('Init new git repo: %s' % directory)
        RunGitCmd(directory, ['init'])
        try:
            # Set up a bogus identity on the buildbots.
            if os.environ.get('BUILDBOT_BUILDERNAME'):
                RunGitCmd(directory, ['config', 'user.name', 'Naclports'])
                RunGitCmd(directory,
                          ['config', 'user.email', '*****@*****.**'])
            RunGitCmd(directory, ['add', '-f', '.'])
            RunGitCmd(directory, ['commit', '-m', 'Upstream version'])
            RunGitCmd(directory, ['checkout', '-b', 'upstream'])
            RunGitCmd(directory, ['checkout', 'master'])
        except:  # pylint: disable=bare-except
            # If git setup fails or is interrupted then remove the partially
            # initialized repository and re-raise the original error.
            util.RemoveTree(git_dir)
            raise
    def GitCloneToMirror(self):
        """Clone the upstream git repo into a local mirror. """
        git_url, git_commit = self.URL.split('@', 1)

        # Clone upstream git repo into local mirror, or update the existing
        # mirror.
        git_mirror = git_url.split('://', 1)[1]
        git_mirror = git_mirror.replace('/', '_')
        mirror_dir = os.path.join(paths.CACHE_ROOT, git_mirror)
        if os.path.exists(mirror_dir):
            if RunGitCmd(mirror_dir, ['rev-parse', git_commit + '^{commit}'],
                         error_ok=True) != 0:
                Log('Updating git mirror: %s' % util.RelPath(mirror_dir))
                RunGitCmd(mirror_dir, ['remote', 'update', '--prune'])
        else:
            Log('Mirroring upstream git repo: %s' % self.URL)
            RunGitCmd(paths.CACHE_ROOT,
                      ['clone', '--mirror', git_url, git_mirror])
        Log('git mirror up-to-date: %s' % util.RelPath(mirror_dir))
        return mirror_dir, git_commit
def main(args):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('revision',
                        metavar='REVISION',
                        help='webports revision to scan for.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Output extra information.')
    parser.add_argument('-p',
                        '--parallel',
                        action='store_true',
                        help='Download packages in parallel.')
    parser.add_argument('-l',
                        '--cache-listing',
                        action='store_true',
                        help='Cache and reuse the gsutil listing (for testing).')
    parser.add_argument(
        '--skip-md5',
        action='store_true',
        help='Assume on-disk files are up-to-date (for testing).')
    args = parser.parse_args(args)
    if args.verbose:
        webports.SetVerbose(True)

    sdk_version = webports.util.GetSDKVersion()
    Log('Scanning packages built for pepper_%s at revision %s' %
        (sdk_version, args.revision))
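    # Location in Google Storage where prebuilt packages for this SDK
    # version and revision are published.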
    base_path = '%s/builds/pepper_%s/%s/packages' % (
        webports.GS_BUCKET, sdk_version, args.revision)
    gs_url = 'gs://' + base_path + '/*'
    listing_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'listing.txt')

    if args.cache_listing and os.path.exists(listing_file):
        Log('Using pre-cached gs listing: %s' % listing_file)
        with open(listing_file) as f:
            listing = f.read()
    else:
        Log('Searching for packages at: %s' % gs_url)
        cmd = FindGsutil() + ['stat', gs_url]
        LogVerbose('Running: %s' % str(cmd))
        try:
            listing = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            raise webports.Error("Command '%s' failed: %s" % (cmd, e))
        if args.cache_listing:
            with open(listing_file, 'w') as f:
                f.write(listing)

    all_files = ParseGsUtilOutput(listing)

    Log('Found %d packages [%s]' %
        (len(all_files), FormatSize(sum(f.size for f in all_files))))

    binaries = DownloadFiles(all_files, not args.skip_md5, args.parallel)
    index_file = os.path.join(webports.NACLPORTS_ROOT, 'lib', 'prebuilt.txt')
    Log('Generating %s' % index_file)
    webports.package_index.WriteIndex(index_file, binaries)
    Log('Done')
    return 0
    def Extract(self):
        """Extract the package archive into its build location.

        This method assumes the package has already been downloaded.
        """
        if self.IsGitUpstream():
            self.GitClone()
            return

        archive = self.DownloadLocation()
        if not archive:
            self.Log('Skipping extract; no upstream archive')
            return

        dest = self.GetBuildLocation()
        output_path, new_foldername = os.path.split(dest)
        util.Makedirs(output_path)

        # Check existing stamp file contents
        stamp_file = self.GetExtractStamp()
        stamp_contents = self.GetExtractStampContent()
        if os.path.exists(dest):
            if StampContentsMatch(stamp_file, stamp_contents):
                Log('Already up-to-date: %s' % util.RelPath(dest))
                return

            raise Error("Upstream archive or patch has changed.\n" +
                        "Please remove existing checkout and try again: '%s'" %
                        dest)

        util.LogHeading('Extracting')
        util.Makedirs(paths.OUT_DIR)
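        # Extract into a temporary directory and rename into place so that a
        # failed or interrupted extract never leaves a partial checkout.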
        tmp_output_path = tempfile.mkdtemp(dir=paths.OUT_DIR)
        try:
            ExtractArchive(archive, tmp_output_path)
            src = os.path.join(tmp_output_path, new_foldername)
            if not os.path.isdir(src):
                raise Error('Archive contents not found: %s' % src)
            LogVerbose("renaming '%s' -> '%s'" % (src, dest))
            os.rename(src, dest)
        finally:
            util.RemoveTree(tmp_output_path)

        self.RemoveStamps()
        WriteStamp(stamp_file, stamp_contents)
    def Download(self, force_mirror=None):
        """Download upstream sources and verify integrity."""
        if self.IsGitUpstream():
            self.GitCloneToMirror()
            return

        archive = self.DownloadLocation()
        if not archive:
            return

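        # The FORCE_MIRROR environment variable provides the default when the
        # caller does not pass force_mirror explicitly.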
        if force_mirror is None:
            force_mirror = os.environ.get('FORCE_MIRROR', False)
        self.DownloadArchive(force_mirror=force_mirror)

        if self.SHA1 is None:
            raise PkgFormatError('missing SHA1 attribute: %s' % self.info)

        util.VerifyHash(archive, self.SHA1)
        Log('verified: %s' % util.RelPath(archive))
    def UpdatePatch(self):
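        """Update the package's patch file from the local git checkout.

        The patch is regenerated as the diff between the 'upstream' branch
        and the current working tree, with git index lines, binary files and
        any paths matched by diff_skip.txt filtered out.
        """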
        if self.URL is None:
            return

        git_dir = self.GetBuildLocation()
        if not os.path.exists(git_dir):
            raise Error('Source directory not found: %s' % git_dir)

        try:
            diff = subprocess.check_output(
                ['git', 'diff', 'upstream', '--no-ext-diff'], cwd=git_dir)
        except subprocess.CalledProcessError as e:
            raise Error('error running git in %s: %s' % (git_dir, str(e)))

        # Drop index lines for a more stable diff.
        diff = re.sub('\nindex [^\n]+\n', '\n', diff)

        # Drop binary files, as they don't work anyhow.
        diff = re.sub(
            'diff [^\n]+\n'
            '(new file [^\n]+\n)?'
            '(deleted file mode [^\n]+\n)?'
            'Binary files [^\n]+ differ\n', '', diff)

        # Always filter out config.sub changes
        diff_skip = ['*config.sub']

        # Add optional per-port skip list.
        diff_skip_file = os.path.join(self.root, 'diff_skip.txt')
        if os.path.exists(diff_skip_file):
            with open(diff_skip_file) as f:
                diff_skip += f.read().splitlines()

        new_diff = ''
        skipping = False
        for line in diff.splitlines():
            if line.startswith('diff --git a/'):
                filename = line[len('diff --git a/'):].split()[0]
                skipping = False
                for skip in diff_skip:
                    if fnmatch.fnmatch(filename, skip):
                        skipping = True
                        break
            if not skipping:
                new_diff += line + '\n'
        diff = new_diff

        # Write back out the diff.
        patch_path = self.GetPatchFile()
        preexisting = os.path.exists(patch_path)

        if not diff:
            if preexisting:
                Log('removing patch file: %s' % util.RelPath(patch_path))
                os.remove(patch_path)
            else:
                Log('no patch required: %s' % util.RelPath(git_dir))
            return

        if preexisting:
            with open(patch_path) as f:
                if diff == f.read():
                    Log('patch unchanged: %s' % util.RelPath(patch_path))
                    return

        with open(patch_path, 'w') as f:
            f.write(diff)

        if preexisting:
            Log('updated patch: %s' % util.RelPath(patch_path))
        else:
            Log('created patch: %s' % util.RelPath(patch_path))
    def Log(self, message):
        Log('%s: %s' % (message, self.InfoString()))