Example #1
    if not name.endswith('.udeb'):
      for f in contents:
        bu.AppendEntry(file_pkg, f, nva)

  pkg_info = dbs['pkg_info']
  src_info = dbs['src_info']
  pkg_deps = dbs['pkg_deps']
  file_pkg = dbs['file_pkg']
  pool_pkg = dbs['pool_pkg']

  CountProgress()
  for name in src_names:
    IndexSource(name)
  for name in pkg_names:
    IndexBinary(name)
  return ru.SelectLatestPackages(pkg_info)


def _TraversePool():
  """Compile lists of package files in the pool hierarchy
  """

  def DoTraverse(_arg, dir, names):
    for name in names:
      if name.endswith('.dsc'):
        src_names.append(os.path.join(dir, name))
      elif name.endswith('.deb') or name.endswith('.udeb'):
        pkg_names.append(os.path.join(dir, name))

  src_names = []
  pkg_names = []
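
The fragment above is cut off before the traversal actually runs. The `DoTraverse(_arg, dir, names)` signature matches the visitor callback of Python 2's `os.path.walk`, so the function is presumably finished along the following lines; this is only a sketch, and the `'pool'` root directory and the return statement are assumptions rather than part of the original listing.

  # Hypothetical completion (not in the original source): walk an assumed
  # 'pool' root and hand back the collected .dsc and .deb/.udeb paths.
  os.path.walk('pool', DoTraverse, None)
  return src_names, pkg_names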
Example #2
def CompileList(_arg, dbs):
    latest = ru.SelectLatestPackages(dbs['pkg_info'])
    return ru.GroupByArch(latest)
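
`CompileList` follows the same `(arg, dbs)` callback convention that `bu.RunWithDB` uses in Example #4, and `ru.GroupByArch` is indexed there as a dict keyed by architecture. A minimal sketch of how it would presumably be driven (the variable name `by_arch` is illustrative):

# Sketch based on the RunWithDB(['pkg_info'], ...) pattern from Example #4.
by_arch = bu.RunWithDB(['pkg_info'], CompileList)
for arch in sorted(by_arch):
    print arch, len(by_arch[arch])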
Example #3
def DoSnapshot(_arg, dbs):
    return ru.SelectLatestPackages(dbs['pkg_info'])
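
Judging from Example #4, where the package list is rebuilt as `'_'.join([n, version, a])`, `ru.SelectLatestPackages` appears to take and return a list of `name_version_architecture` strings, keeping only the newest version of each name/architecture pair. A purely illustrative call with invented data:

# Illustrative only; the package names and versions below are invented.
nva_list = ['foo_1.0-1_amd64', 'foo_1.2-1_amd64', 'bar_2.0-3_i386']
latest = ru.SelectLatestPackages(nva_list)
# Under the assumption above, latest would keep foo_1.2-1_amd64 and bar_2.0-3_i386.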
Example #4
def main():
    lu.SetLogConsole()
    options, proper = _ParseCommandLine()

    def DoSnapshot(_arg, dbs):
        return ru.SelectLatestPackages(dbs['pkg_info'])

    def DoRetrieve(release, dbs):
        releases = dbs['releases']
        aliases = dbs['aliases']
        return ru.FetchReleaseList(release, releases, aliases)[0]

    def DoImport(lines):
        p = []
        for line in lines:
            p.append(line.rstrip('\n'))
        return p

    def DoCollect(_arg, dbs):
        return ru.CollectPackageVersions(dbs['pkg_info'])

    def DoVerify(nva_list, dbs):
        pkg_deps = du.ParseDependencyTable(dbs['pkg_deps'])
        return ru.CollectSources(nva_list, pkg_deps, dbs['src_info'])

    if options.track:
        su.ConfirmTrack(options.track)

    packages = []

    # Add latest packages from the Berkeley DB tables.

    if options.snapshot:
        packages.extend(bu.RunWithDB(['pkg_info'], DoSnapshot))

    # Add packages in Packages files in a dists/ subtree.

    if options.dist is not None:
        for dist in options.dist:
            packages.extend(ru.GetUpstreamReleaseList(dist))

    # Add packages listed in a text file.

    if options.imp is not None:
        for imp in options.imp:
            packages.extend(ou.RunWithFileInput(DoImport, imp))

    # Add packages in an existing release.

    if options.release is not None:
        for release in options.release:
            db_list = ['releases', 'aliases']
            result = bu.RunWithDB(db_list, DoRetrieve, release)
            packages.extend(result)

    # Version selection: work through the list to determine which
    # version of each package should be included in the release.

    ver_dict = bu.RunWithDB(['pkg_info'], DoCollect, None)
    packages = ru.SelectLatestPackages(packages)
    ver_dict = ru.CutOffVersions(ver_dict, packages)
    if options.track:
        name = os.path.join('config', options.track + '.spec')
        ver_dict = ru.FilterVersionWithFile(ver_dict, name)

    # Convert results of version selection back to an nva list.

    packages = []
    for n, a in ver_dict:
        if not ver_dict[(n, a)]: continue
        packages.append('_'.join([n, ver_dict[(n, a)][0], a]))
    packages.sort()

    if len(proper) != 0:

        # Publish the release and write records into Berkeley DB tables.

        if proper[0] == 'commit':
            ru.GenerateReleaseList(options.track, packages)

        # Compare binary package list with an existing release.

        if proper[0] == 'diff':
            rel = proper[1]
            db_list = ['releases', 'aliases']
            cf = bu.RunWithDB(db_list, DoRetrieve, rel)
            old_dict = dict.fromkeys(cf)
            new_dict = dict.fromkeys(packages)
            print 'Packages discarded from', rel + ':'
            for p in sorted(cf):
                if p not in new_dict:
                    print '-', p
            print
            print 'Packages added to the new release:'
            for p in packages:
                if p not in old_dict:
                    print '+', p

        # Republish a previously-defined release.

        if proper[0] == 'rebuild':
            [track, ver] = options.release[0].split('/')
            ru.GenerateReleaseVersion(track, ver)

        # Perform release consistency checking.

        if proper[0] == 'verify':
            underlying = []
            bases = su.GetSetting(options.track, 'Underlying')
            if bases:

                # Build the underlying package list and import underlying
                # dependency databases when necessary.

                for base in bases.split(', '):
                    base_list = base.split(' ', 1)
                    if len(base_list) != 2:
                        lg.error('malformed underlying release ' + base)
                        continue
                    base_url = base_list[0]
                    base_rel = base_list[1]
                    base_packages = bu.FetchUnderlyingRelease(
                        base_url, base_rel)
                    bu.ImportUnderlyingTables(base_url, base_packages)
                    underlying.extend(base_packages)

                # Select only the latest packages in either packages (release
                # to be verified) or underlying (underlying releases).

                combined = ru.SelectLatestPackages(packages + underlying)
                combined = dict.fromkeys(combined)
                for pkg in packages:
                    if pkg in combined:
                        combined[pkg] = True
                packages = []
                underlying = []
                for pkg in combined:
                    if combined[pkg]:
                        packages.append(pkg)
                    else:
                        underlying.append(pkg)

            arch_dict = ru.GroupByArch(packages)
            underlying_dict = ru.GroupByArch(underlying)
            for arch in arch_dict:
                lg.info('Checking dependency for architecture ' + arch)
                underlying_dict.setdefault(arch, [])
                vu.CheckDependency(arch_dict[arch], underlying_dict[arch],
                                   options.no_conflicts)
            bu.RunWithDB(['pkg_deps', 'src_info'], DoVerify, packages)

    # Default action: only list the binary packages in the release.

    else:
        for nva in packages:
            print nva
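
The excerpt stops at the end of `main()`; the usual module entry point, assumed here rather than shown in the original, would be:

# Assumed entry point; not part of the original excerpt.
if __name__ == '__main__':
    main()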