Example #1
def main(options, repo_dir):
  global _new_package
  global retry_packages

  def DoImport(lines):
    return [line.rstrip('\n') for line in lines]

  # If --retry option was specified, import lists of packages to be retried
  if options.retry is not None:
    for retry in options.retry:
      retry_packages.extend(ou.RunWithFileInput(DoImport, retry))

  current_cwd = os.getcwd()
  lu.SetLogConsole()
  try:
    os.chdir(repo_dir)
    if su.GetSetting(None, 'Mode') != 'tracking':
      lg.error('Repository is not in tracking mode')
      sys.exit(1)
    try:
      names = _TraversePool()
      packages = bu.RunWithDB(None, IndexPool, names)
      if _new_package:
        ru.GenerateReleaseList('snapshot', packages)
    except KeyboardInterrupt:
      lg.info('Received keyboard interrupt, terminating...')
  finally:
    os.chdir(current_cwd)
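
For reference, ou.RunWithFileInput shows up in several of these examples
as a way to feed a file's lines to a callback.  Its implementation is not
shown; a minimal sketch, assuming it simply wraps the standard fileinput
module, might look like this:

import fileinput

def RunWithFileInput(func, name):
  # Hypothetical sketch of ou.RunWithFileInput: open the named file,
  # hand its line iterator to func, and close the stream in all cases.
  stream = fileinput.FileInput([name])
  try:
    return func(stream)
  finally:
    stream.close()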
Example #2
def _LoadSettings():
    """Load the configuration file into the _settings dictionary
  """

    global _settings

    if _settings is None:
        _settings = ou.RunWithFileInput(_ParseConfig, 'config/repository')
        if _settings is None:
            lg.error('Cannot load repository settings')
            sys.exit(1)
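
_LoadSettings loads the configuration lazily, so callers can invoke it
unconditionally before every lookup.  The lookup helper itself is not
shown; a hedged sketch of su.GetSetting (called in examples #1 and #5),
assuming track-specific entries override global ones under a
hypothetical 'track/key' naming scheme:

def GetSetting(track, key):
    # Hypothetical sketch: only the (track, key) call signature is
    # visible in these examples; the key scheme is an assumption.
    _LoadSettings()
    if track is not None:
        value = _settings.get(track + '/' + key)
        if value is not None:
            return value
    return _settings.get(key)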
Example #3
  def IndexSource(name):
    """Index a source package (pool_pkg, src_info)
    """

    global _new_package, source_count, source_progress

    src = os.path.basename(name)
    file_size = str(os.stat(name).st_size)
    if src in pool_pkg:
      if pool_pkg[src] != file_size:
        lg.warning('File ' + name + ' does not match indexed data')
      return

    DisplayProgress('Sources', 'Indexing ' + name,
                    source_count, source_progress)
    pool_pkg[src] = file_size
    lines = ou.RunWithFileInput(pu.StripSignature, name)
    attr_dict = pu.ParseAttributes(lines)
    nv = pu.GetSourceID(attr_dict)
    src_info[nv] = du.BuildSrcInfoText(name, attr_dict)
    _new_package = True
    source_progress += 1
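
The nv key built by pu.GetSourceID is checked against src_info as
source_pkg + '_' + version in example #4, which suggests a
'<name>_<version>' format.  A minimal sketch under that assumption:

def GetSourceID(attr_dict):
  # Hypothetical sketch of pu.GetSourceID: build the "<name>_<version>"
  # key that example #4 looks up in src_info.  The field names follow
  # the .dsc control format parsed by pu.ParseAttributes.
  return attr_dict['Source'][0] + '_' + attr_dict['Version'][0]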
Example #4
def _ProcessChangesFile(changes, repo_dir, lists, dbs):
    """Process a .changes file which represents an upload
  """

    src_names, pkg_names = lists
    incoming_dir = os.path.join(repo_dir, 'incoming')
    src_info = dbs['src_info']
    pool_pkg = dbs['pool_pkg']

    # Remove the .changes file from the incoming directory and check its
    # signature (raise EnvironmentError if not properly signed).

    ou.CopyDeleteFiles(incoming_dir, '.', [changes])
    cu.VerifySignature(changes)

    # Parse the .changes file into a Python dictionary.

    lines = ou.RunWithFileInput(pu.StripSignature, changes)
    changes_dict = pu.ParseAttributes(lines)
    md5_dict = _GetUploadedFiles(changes_dict)
    version = changes_dict['Version'][0]
    source_pkg = changes_dict['Source'][0]

    # Establish the status of the upload in relation to the repository.
    # Is the source package already in the repository?  Is the upstream
    # source already in the repository?  Is this a native package?

    new_source = True
    new_upstream = True
    native = True

    if (source_pkg + '_' + version) in src_info:
        new_source = False
    if '-' in version:
        upstream_ver = '-'.join(version.split('-')[:-1])
        source_header = source_pkg + '_' + upstream_ver + '-'
        if bu.FindKeyStartingWith(source_header, src_info):
            new_upstream = False
        native = False
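    # For example, version '1.2-3' yields upstream_ver '1.2'; a version
    # with no '-' separator belongs to a native package.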

    # Now check the contents of the upload.  Does it have native source,
    # upstream source, or a Debian diff?  Are any of the files being
    # uploaded already present in the pool?

    has_source = False
    has_diff = False
    has_tar = False
    has_orig_tar = False

    for name in md5_dict:
        if name in pool_pkg:
            lg.error('File ' + name + ' is already in the pool')
            raise EnvironmentError
        if name.endswith('.dsc'):
            has_source = True
        elif name.endswith(('.orig.tar.gz', '.orig.tar.bz2')):
            has_orig_tar = True
        elif name.endswith(('.tar.gz', '.tar.bz2')):
            has_tar = True
        elif name.endswith('.diff.gz'):
            has_diff = True

    # Check that the contents of the upload match its status (in
    # accordance with the Debian Policy).  You can relax the rules by
    # modifying the checks, but make sure that the modified logic still
    # requires all binary packages to come with sources.

    if new_source:
        if not has_source:
            lg.error('New package upload must contain source .dsc')
            raise EnvironmentError
        if native:
            prefix = 'New native package upload must '
            if has_diff:
                lg.error(prefix + 'not contain .diff')
                raise EnvironmentError
            if has_orig_tar:
                lg.error(prefix + 'not contain upstream .orig.tar')
                raise EnvironmentError
            if not has_tar:
                lg.error(prefix + 'contain source .tar')
                raise EnvironmentError
        else:
            prefix = 'New non-native package upload must '
            if not has_diff:
                lg.error(prefix + 'contain .diff')
                raise EnvironmentError
            if new_upstream:
                if not has_orig_tar:
                    lg.error('New upstream non-native package upload ' +
                             'must contain upstream .orig.tar')
                    raise EnvironmentError
            else:
                if has_orig_tar:
                    lg.error('New distribution non-native package upload ' +
                             'must not contain upstream .orig.tar')
                    raise EnvironmentError
            if has_tar:
                lg.error(prefix + 'not contain source .tar')
                raise EnvironmentError

    # Move rest of the uploaded files from incoming to the current
    # (temp) directory and check their MD5 hash value (raise
    # EnvironmentError if the hash values disagree).

    ou.CopyDeleteFiles(incoming_dir, '.', md5_dict)
    cu.VerifyMD5Hash(md5_dict)

    # Move uploaded files (along with .changes) into the pool.

    pool_loc = pu.GetPathInPool(source_pkg)
    pool_dir = os.path.join(repo_dir, pool_loc)
    ou.CopyDeleteFiles('.', pool_dir, md5_dict)
    ou.CopyDeleteFiles('.', pool_dir, [changes])

    # Compile the list of source package files (src_names) and binary
    # package files (pkg_names) added to the repository by this upload.
    # There is no need to return these lists because they are passed in
    # by reference.

    for name in md5_dict:
        if name.endswith('.dsc'):
            src_names.append(os.path.join(pool_loc, name))
        elif name.endswith('.deb') or name.endswith('.udeb'):
            pkg_names.append(os.path.join(pool_loc, name))
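
_GetUploadedFiles is not shown above.  Judging by how md5_dict is used
(filenames as keys, verified by cu.VerifyMD5Hash), it presumably parses
the Files field of the .changes dictionary, whose entries follow the
'<md5> <size> <section> <priority> <filename>' layout.  A sketch under
those assumptions:

def _GetUploadedFiles(changes_dict):
    # Hypothetical sketch: map each uploaded filename to its declared
    # MD5 hash so cu.VerifyMD5Hash can compare files on disk against it.
    md5_dict = {}
    for entry in changes_dict['Files']:
        fields = entry.split()
        md5_dict[fields[-1]] = fields[0]
    return md5_dict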
Example #5
def main():
    lu.SetLogConsole()
    options, proper = _ParseCommandLine()

    def DoSnapshot(_arg, dbs):
        return ru.SelectLatestPackages(dbs['pkg_info'])

    def DoRetrieve(release, dbs):
        releases = dbs['releases']
        aliases = dbs['aliases']
        return ru.FetchReleaseList(release, releases, aliases)[0]

    def DoImport(lines):
        return [line.rstrip('\n') for line in lines]

    def DoCollect(_arg, dbs):
        return ru.CollectPackageVersions(dbs['pkg_info'])

    def DoVerify(nva_list, dbs):
        pkg_deps = du.ParseDependencyTable(dbs['pkg_deps'])
        return ru.CollectSources(nva_list, pkg_deps, dbs['src_info'])

    if options.track:
        su.ConfirmTrack(options.track)

    packages = []

    # Add latest packages from the Berkeley DB tables.

    if options.snapshot:
        packages.extend(bu.RunWithDB(['pkg_info'], DoSnapshot))

    # Add packages in Packages files in a dists/ subtree.

    if options.dist is not None:
        for dist in options.dist:
            packages.extend(ru.GetUpstreamReleaseList(dist))

    # Add packages listed in a text file.

    if options.imp is not None:
        for imp in options.imp:
            packages.extend(ou.RunWithFileInput(DoImport, imp))

    # Add packages in an existing release.

    if options.release is not None:
        for release in options.release:
            db_list = ['releases', 'aliases']
            result = bu.RunWithDB(db_list, DoRetrieve, release)
            packages.extend(result)

    # Version selection: work through the list to determine which
    # version of each package should be included in the release.

    ver_dict = bu.RunWithDB(['pkg_info'], DoCollect, None)
    packages = ru.SelectLatestPackages(packages)
    ver_dict = ru.CutOffVersions(ver_dict, packages)
    if options.track:
        name = os.path.join('config', options.track + '.spec')
        ver_dict = ru.FilterVersionWithFile(ver_dict, name)

    # Convert results of version selection back to an nva list.
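    # (An nva string has the form 'name_version_arch', as built below.)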

    packages = []
    for (n, a), versions in ver_dict.items():
        if versions:
            packages.append('_'.join([n, versions[0], a]))
    packages.sort()

    if proper:

        # Publish the release and write records into Berkeley DB tables.

        if proper[0] == 'commit':
            ru.GenerateReleaseList(options.track, packages)

        # Compare binary package list with an existing release.

        if proper[0] == 'diff':
            rel = proper[1]
            db_list = ['releases', 'aliases']
            cf = bu.RunWithDB(db_list, DoRetrieve, rel)
            old_dict = dict.fromkeys(cf)
            new_dict = dict.fromkeys(packages)
            print 'Packages discarded from', rel + ':'
            for p in sorted(cf):
                if p not in new_dict:
                    print '-', p
            print
            print 'Packages added to the new release:'
            for p in packages:
                if p not in old_dict:
                    print '+', p

        # Republish a previously-defined release.

        if proper[0] == 'rebuild':
            track, ver = options.release[0].split('/')
            ru.GenerateReleaseVersion(track, ver)

        # Perform release consistency checking.

        if proper[0] == 'verify':
            underlying = []
            bases = su.GetSetting(options.track, 'Underlying')
            if bases:

                # Build the underlying package list and import underlying
                # dependency databases when necessary

                for base in bases.split(', '):
                    base_list = base.split(' ', 1)
                    if len(base_list) != 2:
                        lg.error('Malformed underlying release ' + base)
                        continue
                    base_url = base_list[0]
                    base_rel = base_list[1]
                    base_packages = bu.FetchUnderlyingRelease(
                        base_url, base_rel)
                    bu.ImportUnderlyingTables(base_url, base_packages)
                    underlying.extend(base_packages)

                # Select only the latest packages in either packages (release
                # to be verified) or underlying (underlying releases)

                combined = ru.SelectLatestPackages(packages + underlying)
                combined = dict.fromkeys(combined)
                for pkg in packages:
                    if pkg in combined:
                        combined[pkg] = True
                packages = []
                underlying = []
                for pkg in combined:
                    if combined[pkg]:
                        packages.append(pkg)
                    else:
                        underlying.append(pkg)

            arch_dict = ru.GroupByArch(packages)
            underlying_dict = ru.GroupByArch(underlying)
            for arch in arch_dict:
                lg.info('Checking dependency for architecture ' + arch)
                underlying_dict.setdefault(arch, [])
                vu.CheckDependency(arch_dict[arch], underlying_dict[arch],
                                   options.no_conflicts)
            bu.RunWithDB(['pkg_deps', 'src_info'], DoVerify, packages)

    # Default action: only list the binary packages in the release.

    else:
        for nva in packages:
            print nva
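
_ParseCommandLine is not shown.  Its flags can be inferred from the
option attributes main() reads, though the exact spellings and defaults
are assumptions; a Python 2 optparse sketch:

import optparse

def _ParseCommandLine():
    # Hypothetical sketch reconstructed from the attributes used above:
    # snapshot, dist, imp, release, track, and no_conflicts.  Only the
    # destinations are visible; the flag names are guesses.
    parser = optparse.OptionParser()
    parser.add_option('--snapshot', action='store_true', default=False)
    parser.add_option('--dist', action='append')
    parser.add_option('--import', dest='imp', action='append')
    parser.add_option('--release', action='append')
    parser.add_option('--track')
    parser.add_option('--no-conflicts', dest='no_conflicts',
                      action='store_true', default=False)
    # parse_args returns (options, args), matching
    # "options, proper = _ParseCommandLine()" in main().
    return parser.parse_args()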