Example #1
    def add_package(self, path, deltas, repo_uri):
        package = pisi.package.Package(path, "r")
        md = package.get_metadata()
        md.package.packageSize = os.path.getsize(path)
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:  # create relative path by default
            # TODO: in the future we'll do all of this with purl/pfile/&helpers
            # really? heheh -- future exa
            md.package.packageURI = util.removepathprefix(repo_uri, path)
        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.error(_("Package %s: metadata corrupt, skipping...") % md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                for delta_path in deltas[md.package.name]:
                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(repo_uri, delta_path)
                    delta.packageSize = os.path.getsize(delta_path)
                    delta.packageHash = util.sha1_file(delta_path)
                    name, buildFrom, buildTo = util.parse_delta_package_name(delta_path)
                    delta.buildFrom = buildFrom
                    md.package.deltaPackages.append(delta)

            self.packages.append(md.package)
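Every example on this page revolves around util.sha1_file, which produces the hex SHA-1 digest of a file so it can be stored in packageHash fields or compared against recorded sha1sum values. A minimal sketch of what such a helper presumably looks like (inferred from how it is called here, not the actual pisi implementation):

import hashlib

def sha1_file(path, blocksize=65536):
    # Hash the file in fixed-size chunks so large package archives
    # never have to be read into memory at once.
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        chunk = f.read(blocksize)
        while chunk:
            digest.update(chunk)
            chunk = f.read(blocksize)
    return digest.hexdigest()

The examples below compare this digest against values such as spec.source.archive.sha1sum or pkg.packageHash; any mismatch is treated as a corrupt or stale file.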
Example #2
    def add_package(self, path, deltas, repo_uri):
        package = pisi.package.Package(path, 'r')
        md = package.get_metadata()
        md.package.packageSize = os.path.getsize(path)
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:                           # create relative path by default
            # TODO: in the future we'll do all of this with purl/pfile/&helpers
            # really? heheh -- future exa
            md.package.packageURI = util.removepathprefix(repo_uri, path)
        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.error(_('Package %s: metadata corrupt, skipping...') % md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                for delta_path in deltas[md.package.name]:
                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(repo_uri, delta_path)
                    delta.packageSize = os.path.getsize(delta_path)
                    delta.packageHash = util.sha1_file(delta_path)
                    name, buildFrom, buildTo = util.parse_delta_package_name(delta_path)
                    delta.buildFrom = buildFrom
                    md.package.deltaPackages.append(delta)

            self.packages.append(md.package)
Example #3
 def testFetch(self):
     self.fetch.fetch()
     fetchedFile = os.path.join(self.destpath, self.url.filename())
     if os.access(fetchedFile, os.R_OK):
         self.assertEqual(util.sha1_file(fetchedFile),
                          self.spec.source.archive.sha1sum)
     os.remove(fetchedFile)
Example #4
 def testFetch(self):
     self.fetch.fetch()
     fetchedFile = os.path.join(self.destpath, self.url.filename())
     if os.access(fetchedFile, os.R_OK):
         self.assertEqual(util.sha1_file(fetchedFile),
                          self.spec.source.archiveSHA1)
     os.remove(fetchedFile)
Example #5
def calculate_download_sizes(order):
    total_size = cached_size = 0

    installdb = pisi.db.installdb.InstallDB()
    packagedb = pisi.db.packagedb.PackageDB()

    for pkg in [packagedb.get_package(name) for name in order]:

        delta = None
        if installdb.has_package(pkg.name):
            (version, release, build) = installdb.get_version(pkg.name)
            delta = pkg.get_delta(buildFrom=build)

        if delta:
            fn = os.path.basename(delta.packageURI)
            pkg_hash = delta.packageHash
            pkg_size = delta.packageSize
        else:
            fn = os.path.basename(pkg.packageURI)
            pkg_hash = pkg.packageHash
            pkg_size = pkg.packageSize

        path = util.join_path(ctx.config.cached_packages_dir(), fn)

        # check the file and sha1sum to be sure it _is_ the cached package
        if os.path.exists(path) and util.sha1_file(path) == pkg_hash:
            cached_size += pkg_size

        total_size += pkg_size

    ctx.ui.notify(ui.cached, total=total_size, cached=cached_size)
    return total_size, cached_size
Example #6
    def from_name(name, ignore_dep=None):
        packagedb = pisi.db.packagedb.PackageDB()
        # download package and return an installer object
        # find package in repository
        repo = packagedb.which_repo(name)
        if repo:
            repodb = pisi.db.repodb.RepoDB()
            ctx.ui.info(_("Package %s found in repository %s") % (name, repo))

            repo = repodb.get_repo(repo)
            pkg = packagedb.get_package(name)
            delta = None

            installdb = pisi.db.installdb.InstallDB()
            # Package is installed. This is an upgrade. Check delta.
            if installdb.has_package(pkg.name):
                (version, release, build, distro,
                 distro_release) = installdb.get_version_and_distro_release(
                     pkg.name)
                # pisi distro upgrade should not use delta support
                if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                    delta = pkg.get_delta(buildFrom=build)

            ignore_delta = ctx.config.values.general.ignore_delta

            # If delta exists then use the delta uri.
            if delta and not ignore_delta:
                pkg_uri = delta.packageURI
                pkg_hash = delta.packageHash
            else:
                pkg_uri = pkg.packageURI
                pkg_hash = pkg.packageHash

            uri = pisi.uri.URI(pkg_uri)
            if uri.is_absolute_path():
                pkg_path = str(pkg_uri)
            else:
                pkg_path = os.path.join(
                    os.path.dirname(repo.indexuri.get_uri()), str(uri.path()))

            ctx.ui.info(_("Package URI: %s") % pkg_path, verbose=True)

            # Bug 4113
            cached_file = pisi.package.Package.is_cached(pkg_path)
            if cached_file and util.sha1_file(cached_file) != pkg_hash:
                os.unlink(cached_file)

            install_op = Install(pkg_path, ignore_dep)

            # Bug 4113
            downloaded_file = install_op.package.filepath
            if pisi.util.sha1_file(downloaded_file) != pkg_hash:
                raise pisi.Error(
                    _("Download Error: Package does not match the repository package."
                      ))

            return install_op
        else:
            raise Error(
                _("Package %s not found in any active repository.") % name)
Example #7
def calculate_download_sizes(order):
    total_size = cached_size = 0

    installdb = pisi.db.installdb.InstallDB()
    packagedb = pisi.db.packagedb.PackageDB()

    for pkg in [packagedb.get_package(name) for name in order]:

        delta = None
        if installdb.has_package(pkg.name):
            (version, release, build) = installdb.get_version(pkg.name)
            delta = pkg.get_delta(buildFrom=build)

        if delta:
            fn = os.path.basename(delta.packageURI)
            pkg_hash = delta.packageHash
            pkg_size = delta.packageSize
        else:
            fn = os.path.basename(pkg.packageURI)
            pkg_hash = pkg.packageHash
            pkg_size = pkg.packageSize

        path = util.join_path(ctx.config.cached_packages_dir(), fn)

        # check the file and sha1sum to be sure it _is_ the cached package
        if os.path.exists(path) and util.sha1_file(path) == pkg_hash:
            cached_size += pkg_size

        total_size += pkg_size

    ctx.ui.notify(ui.cached, total=total_size, cached=cached_size)
    return total_size, cached_size
Example #8
 def testResume(self):
     resume_test_file = "tests/helloworld/helloworld-2.0.tar.bz2.part"
     shutil.copy(resume_test_file, ctx.config.archives_dir())
     self.fetch.fetch()
     fetchedFile = os.path.join(self.destpath, self.url.filename())
     if os.access(fetchedFile, os.R_OK):
         self.assertEqual(util.sha1_file(fetchedFile),
                          self.spec.source.archive.sha1sum)
     os.remove(fetchedFile)
Example #9
 def testResume(self):
     resume_test_file = "tests/helloworld/hello-1.3.tar.gz.part"
     shutil.copy(resume_test_file, ctx.config.archives_dir())
     self.fetch.fetch()
     fetchedFile = os.path.join(self.destpath, self.url.filename())
     if os.access(fetchedFile, os.R_OK):
         self.assertEqual(util.sha1_file(fetchedFile),
                          self.spec.source.archiveSHA1)
     os.remove(fetchedFile)
Example #10
    def from_name(name, ignore_dep = None):
        packagedb = pisi.db.packagedb.PackageDB()
        # download package and return an installer object
        # find package in repository
        repo = packagedb.which_repo(name)
        if repo:
            repodb = pisi.db.repodb.RepoDB()
            ctx.ui.info(_("Package %s found in repository %s") % (name, repo))

            repo = repodb.get_repo(repo)
            pkg = packagedb.get_package(name)
            delta = None

            installdb = pisi.db.installdb.InstallDB()
            # Package is installed. This is an upgrade. Check delta.
            if installdb.has_package(pkg.name):
                (version, release, build, distro, distro_release) = installdb.get_version_and_distro_release(pkg.name)
                # pisi distro upgrade should not use delta support
                if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                    delta = pkg.get_delta(release)

            ignore_delta = ctx.config.values.general.ignore_delta

            # If delta exists then use the delta uri.
            if delta and not ignore_delta:
                pkg_uri = delta.packageURI
                pkg_hash = delta.packageHash
            else:
                pkg_uri = pkg.packageURI
                pkg_hash = pkg.packageHash

            uri = pisi.uri.URI(pkg_uri)
            if uri.is_absolute_path():
                pkg_path = str(pkg_uri)
            else:
                pkg_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                        str(uri.path()))

            ctx.ui.info(_("Package URI: %s") % pkg_path, verbose=True)

            # Bug 4113
            cached_file = pisi.package.Package.is_cached(pkg_path)
            if cached_file and util.sha1_file(cached_file) != pkg_hash:
                os.unlink(cached_file)
                cached_file = None

            install_op = Install(pkg_path, ignore_dep)

            # Bug 4113
            if not cached_file:
                downloaded_file = install_op.package.filepath
                if pisi.util.sha1_file(downloaded_file) != pkg_hash:
                    raise pisi.Error(_("Download Error: Package does not match the repository package."))

            return install_op
        else:
            raise Error(_("Package %s not found in any active repository.") % name)
Example #11
    def add_package(self, path, deltas, repo_uri):
        package = pisi.package.Package(path, 'r')
        md = package.get_metadata()
        md.package.packageSize = long(os.path.getsize(path))
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:                           # create relative path by default
            # TODO: in the future we'll do all of this with purl/pfile/&helpers
            # really? heheh -- future exa
            md.package.packageURI = util.removepathprefix(repo_uri, path)
        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.error(_('Package %s: metadata corrupt, skipping...') % md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                name, version, release, distro_id, arch = \
                        util.split_package_filename(path)

                for delta_path in deltas[md.package.name]:
                    src_release, dst_release, delta_distro_id, delta_arch = \
                            util.split_delta_package_filename(delta_path)[1:]

                    # Add only delta to latest build of the package
                    if dst_release != md.package.release or \
                            (delta_distro_id, delta_arch) != (distro_id, arch):
                        continue

                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(repo_uri, delta_path)
                    delta.packageSize = long(os.path.getsize(delta_path))
                    delta.packageHash = util.sha1_file(delta_path)
                    delta.releaseFrom = src_release

                    md.package.deltaPackages.append(delta)

            self.packages.append(md.package)
Example #12
 def testUnpackTarCond(self):
     spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
     targetDir = '/tmp'
     archiv = sourcearchive.SourceArchive(spec, targetDir)
     url = uri.URI(spec.source.archive.uri)
     filePath = join(pisi.context.config.archives_dir(), url.filename())
     if util.sha1_file(filePath) != spec.source.archive.sha1sum:
         fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
         fetch.fetch()
     assert spec.source.archive.type == 'targz'
Example #13
def check(package):
    md, files = info(package, True)
    corrupt = []
    for file in files.list:
        if file.hash and file.type != "config" and not os.path.islink("/" + file.path):
            ctx.ui.info(_("Checking %s...") % file.path, False, True)
            if file.hash != util.sha1_file("/" + file.path):
                corrupt.append(file)
                ctx.ui.info("Corrupt file: %s" % file)
            else:
                ctx.ui.info("OK", False)
    return corrupt
Example #14
def check(package):
    md, files = info.info(package, True)
    corrupt = []
    for file in files.list:
        if file.hash and file.type != "config" \
           and not os.path.islink('/' + file.path):
            ctx.ui.info(_("Checking %s ") % file.path, noln=True, verbose=True)
            if file.hash != util.sha1_file('/' + file.path):
                corrupt.append(file)
                ctx.ui.info("Corrupt file: %s" % file)
            else:
                ctx.ui.info("OK", verbose=True)
    return corrupt
Example #15
def check(package):
    md, files = info(package, True)
    corrupt = []
    for file in files.list:
        if file.hash and file.type != "config" \
           and not os.path.islink('/' + file.path):
            ctx.ui.info(_("Checking /%s ") % file.path, noln=True, verbose=True) 
            if file.hash != util.sha1_file('/' + file.path):
                corrupt.append(file)
                ctx.ui.info(_("\nCorrupt file: %s") % file, noln=True)
            else:
                ctx.ui.info(_("OK"), verbose=True)
    return corrupt
Example #16
def check(package):
    md, files = info(package, True)
    corrupt = []
    for file in files.list:
        if file.hash and file.type != "config" and not os.path.islink("/" + file.path):
            ctx.ui.info(_("Checking /%s ") % file.path, noln=True, verbose=True)
            try:
                if file.hash != util.sha1_file("/" + file.path):
                    corrupt.append(file)
                    ctx.ui.error(_("\nCorrupt file: %s") % file)
                else:
                    ctx.ui.info(_("OK"), verbose=True)
            except pisi.util.FileError, e:
                ctx.ui.error("\n%s" % e)
    return corrupt
Example #17
def calculate_download_sizes(order):
    total_size = cached_size = 0

    for pkg in [ctx.packagedb.get_package(name) for name in order]:

        # get the cached package's path
        fn = util.package_name(pkg.name, pkg.version, pkg.release, pkg.build, True)
        path = util.join_path(ctx.config.packages_dir(), fn)

        # check the file and sha1sum to be sure it _is_ the cached package
        if os.path.exists(path) and util.sha1_file(path) == pkg.packageHash:
            cached_size += pkg.packageSize

        total_size += pkg.packageSize

    ctx.ui.notify(ui.cached, total=total_size, cached=cached_size)
    return total_size, cached_size
Example #18
def calculate_download_sizes(order):
    total_size = cached_size = 0

    installdb = pisi.db.installdb.InstallDB()
    packagedb = pisi.db.packagedb.PackageDB()

    try:
        cached_packages_dir = ctx.config.cached_packages_dir()
    except OSError:
        # happens when an unprivileged user tries to create cached_packages_dir
        cached_packages_dir = None

    for pkg in [packagedb.get_package(name) for name in order]:

        delta = None
        if installdb.has_package(pkg.name):
            (version, release, build, distro,
             distro_release) = installdb.get_version_and_distro_release(
                 pkg.name)
            # pisi distro upgrade should not use delta support
            if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                delta = pkg.get_delta(buildFrom=build)

        ignore_delta = ctx.config.values.general.ignore_delta

        if delta and not ignore_delta:
            fn = os.path.basename(delta.packageURI)
            pkg_hash = delta.packageHash
            pkg_size = delta.packageSize
        else:
            fn = os.path.basename(pkg.packageURI)
            pkg_hash = pkg.packageHash
            pkg_size = pkg.packageSize

        if cached_packages_dir:
            path = util.join_path(cached_packages_dir, fn)
            # check the file and sha1sum to be sure it _is_ the cached package
            if os.path.exists(path) and util.sha1_file(path) == pkg_hash:
                cached_size += pkg_size
            elif os.path.exists("%s.part" % path):
                cached_size += os.stat("%s.part" % path).st_size

        total_size += pkg_size

    ctx.ui.notify(ui.cached, total=total_size, cached=cached_size)
    return total_size, cached_size
Example #19
    def testUnpackZipCond(self):
        bctx = BuildContext("tests/pccts/pspec.xml")
        url = uri.URI(bctx.spec.source.archive.uri)
        targetDir = bctx.pkg_work_dir()
        filePath = join(ctx.config.archives_dir(), url.filename())

        # check cached
        if util.sha1_file(filePath) != bctx.spec.source.archive.sha1sum:
            fetch = fetcher.Fetcher(bctx.spec.source.archive.uri, targetDir)
            fetch.fetch()
        assert bctx.spec.source.archive.type == "zip"

        achv = archive.Archive(filePath, bctx.spec.source.archive.type)
        achv.unpack_files(["pccts/history.txt"], targetDir)
        assert pathexists(targetDir + "/pccts")
        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
Example #20
    def testUnpackZip(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp/pisitest'

        assert spec.source.archive.type == "zip"

        achv = sourcearchive.SourceArchive(spec, targetDir)
        achv.fetch(interactive=False)
        achv.unpack(clean_dir=True)

        assert pathexists(targetDir + "/pccts")

        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")
Example #21
    def testUnpackZipCond(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp'
        achv = sourcearchive.SourceArchive(spec, targetDir)
        url = uri.URI(spec.source.archive.uri)
        filePath = join(ctx.config.archives_dir(), url.filename())

        # check cached
        if util.sha1_file(filePath) != spec.source.archive.sha1sum:
            fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
            fetch.fetch()
        assert spec.source.archive.type == "zip"

        achv = archive.Archive(filePath, spec.source.archive.type)
        achv.unpack_files(["pccts/history.txt"], targetDir)
        assert pathexists(targetDir + "/pccts")
        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
Example #22
    def testUnpackZip(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp/pisitest'

        assert spec.source.archive.type == "zip"

        achv = sourcearchive.SourceArchive(spec, targetDir)
        achv.fetch(interactive=False)
        achv.unpack(clean_dir=True)

        assert pathexists(targetDir + "/pccts")

        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")
Example #23
    def testUnpackZipCond(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp'
        achv = sourcearchive.SourceArchive(spec, targetDir)
        url = uri.URI(spec.source.archive.uri)
        filePath = join(ctx.config.archives_dir(), url.filename())

        # check cached
        if util.sha1_file(filePath) != spec.source.archive.sha1sum:
            fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
            fetch.fetch()
        assert spec.source.archive.type == "zip"

        achv = archive.Archive(filePath, spec.source.archive.type)
        achv.unpack_files(["pccts/history.txt"], targetDir)
        assert pathexists(targetDir + "/pccts")
        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
Example #24
    def testUnpackZip(self):
        bctx = BuildContext("tests/pccts/pspec.xml")

        assert bctx.spec.source.archive.type == "zip"

        achv = sourcearchive.SourceArchive(bctx)
        achv.fetch(interactive=False)
        achv.unpack(clean_dir=True)

        targetDir = bctx.pkg_work_dir()
        assert pathexists(targetDir + "/pccts")

        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")
Example #25
 def add_package(self, path, repo_uri):
     package = Package(path, 'r')
     md = package.get_metadata()
     md.package.packageSize = os.path.getsize(path)
     md.package.packageHash = util.sha1_file(path)
     if ctx.config.options and ctx.config.options.absolute_urls:
         md.package.packageURI = os.path.realpath(path)
     else:                           # create relative path by default
         # TODO: in the future we'll do all of this with purl/pfile/&helpers
         # really? heheh -- future exa
         md.package.packageURI = util.removepathprefix(repo_uri, path)
     # check package semantics
     errs = md.errors()
     if md.errors():
         ctx.ui.error(_('Package %s: metadata corrupt, skipping...') % md.package.name)
         ctx.ui.error(unicode(Error(*errs)))
     else:
         self.packages.append(md.package)
Example #26
def calculate_download_sizes(order):
    total_size = cached_size = 0

    installdb = pisi.db.installdb.InstallDB()
    packagedb = pisi.db.packagedb.PackageDB()

    try:
        cached_packages_dir = ctx.config.cached_packages_dir()
    except OSError:
        # happens when an unprivileged user tries to create cached_packages_dir
        cached_packages_dir = None

    for pkg in [packagedb.get_package(name) for name in order]:

        delta = None
        if installdb.has_package(pkg.name):
            (version, release, build, distro, distro_release) = installdb.get_version_and_distro_release(pkg.name)
            # pisi distro upgrade should not use delta support
            if distro == pkg.distribution and distro_release == pkg.distributionRelease:
                delta = pkg.get_delta(buildFrom=build)

        ignore_delta = ctx.config.values.general.ignore_delta

        if delta and not ignore_delta:
            fn = os.path.basename(delta.packageURI)
            pkg_hash = delta.packageHash
            pkg_size = delta.packageSize
        else:
            fn = os.path.basename(pkg.packageURI)
            pkg_hash = pkg.packageHash
            pkg_size = pkg.packageSize

        if cached_packages_dir:
            path = util.join_path(cached_packages_dir, fn)
            # check the file and sha1sum to be sure it _is_ the cached package
            if os.path.exists(path) and util.sha1_file(path) == pkg_hash:
                cached_size += pkg_size
            elif os.path.exists("%s.part" % path):
                cached_size += os.stat("%s.part" % path).st_size

        total_size += pkg_size

    ctx.ui.notify(ui.cached, total=total_size, cached=cached_size)
    return total_size, cached_size
Example #27
    def fetch_remote_file(self, url):
        from fetcher import fetch_url
        dest = ctx.config.packages_dir()
        self.filepath = join(dest, url.filename())

        sha1sum = None
        if exists(self.filepath):
            sha1sum = util.sha1_file(self.filepath)

        name, version = util.parse_package_name(basename(self.filepath))
        if sha1sum != ctx.packagedb.get_package(name).packageHash:
            try:
                fetch_url(url, dest, ctx.ui.Progress)
            except pisi.fetcher.FetchError:
                # Bug 3465
                if ctx.get_option('reinstall'):
                    raise Error(_("There was a problem while fetching '%s'.\nThe package "
                    "may have been upgraded. Please try to upgrade the package.") % url);
                raise
        else:
            ctx.ui.info(_('%s [cached]') % url.filename())
Example #28
    def testUnpackTar(self):
        spec = SpecFile("tests/popt/pspec.xml")
        targetDir = '/tmp/pisitest'
        achv = sourcearchive.SourceArchive(spec, targetDir)
    
        assert spec.source.archive.type == "targz"

        # skip fetching and directly unpack the previously fetched (by
        # fetchertests) archive
        if not achv.is_cached(interactive=False):
            achv.fetch(interactive=False)
        achv.unpack()
    
        # but testing is hard
        # "var/tmp/pisi/popt-1.7-3/work" (targetDir)
        assert pathexists(targetDir + "/popt-1.7")

        testfile = targetDir + "/popt-1.7/Makefile.am"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "5af9dd7d754f788cf511c57ce0af3d555fed009d")
Example #29
    def testUnpackTar(self):
        spec = SpecFile("tests/popt/pspec.xml")
        targetDir = '/tmp/pisitest'
        achv = sourcearchive.SourceArchive(spec, targetDir)
    
        assert spec.source.archive.type == "targz"

        # skip fetching and directly unpack the previously fetched (by
        # fetchertests) archive
        if not achv.is_cached(interactive=False):
            achv.fetch(interactive=False)
        achv.unpack()
    
        # but testing is hard
        # "var/tmp/pisi/popt-1.7-3/work" (targetDir)
        assert pathexists(targetDir + "/popt-1.7")

        testfile = targetDir + "/popt-1.7/Makefile.am"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "5af9dd7d754f788cf511c57ce0af3d555fed009d")
Example #30
def add_package(params):
    try:
        path, deltas, repo_uri = params

        ctx.ui.info("%-80.80s\r" % (_("Adding package to index: %s") % os.path.basename(path)), noln=True)

        package = pisi.package.Package(path, "r")
        md = package.get_metadata()
        md.package.packageSize = long(os.path.getsize(path))
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:
            md.package.packageURI = util.removepathprefix(repo_uri, path)

        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.info("")
            ctx.ui.error(_("Package %s: metadata corrupt, skipping...") % md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                name, version, release, distro_id, arch = util.split_package_filename(path)

                for delta_path in deltas[md.package.name]:
                    src_release, dst_release, delta_distro_id, delta_arch = util.split_delta_package_filename(
                        delta_path
                    )[1:]

                    # Add only delta to latest build of the package
                    if dst_release != md.package.release or (delta_distro_id, delta_arch) != (distro_id, arch):
                        continue

                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(repo_uri, delta_path)
                    delta.packageSize = long(os.path.getsize(delta_path))
                    delta.packageHash = util.sha1_file(delta_path)
                    delta.releaseFrom = src_release

                    md.package.deltaPackages.append(delta)

        return md.package

    except KeyboardInterrupt:
        # Handle KeyboardInterrupt exception to prevent ugly backtrace of all
        # worker processes and propagate the exception to main process.
        #
        # Probably it's better to use just 'raise' here, but multiprocessing
        # module has some bugs about that: (python#8296, python#9205 and
        # python#9207 )
        #
        # For now, worker processes do not propagate exceptions other than
        # Exception (like KeyboardInterrupt), so we have to manually propagate
        # KeyboardInterrupt exception as an Exception.

        raise Exception
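The KeyboardInterrupt note above implies that add_package runs as a multiprocessing worker while the repository index is being built. A rough sketch of a driver that could feed it (the pool setup and the index_packages name are assumptions for illustration, not taken from pisi):

import multiprocessing

def index_packages(package_paths, deltas, repo_uri):
    # Each worker receives one (path, deltas, repo_uri) tuple, matching
    # the single 'params' argument that add_package unpacks.
    params = [(path, deltas, repo_uri) for path in package_paths]
    pool = multiprocessing.Pool()
    try:
        return pool.map(add_package, params)
    finally:
        pool.close()
        pool.join()

Because the worker converts KeyboardInterrupt into a plain Exception, an interrupted pool.map fails cleanly in the parent process instead of printing a backtrace from every worker.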
Example #31
def main():
    global options

    # Parse options
    parser = OptionParser(usage="%prog [options]", version="%prog 1.0")

    parser.add_option("-N", "--no-color",
                      action="store_false", dest="color", default=True,
                      help=_("don't use colors"))
    parser.add_option("-p", "--packages",
                      action="store_true", dest="packages", default=False,
                      help=_("show package names"))
    parser.add_option("-l", "--long",
                      action="store_true", dest="long", default=False,
                      help=_("show details of advisories"))
    parser.add_option("-a", "--all",
                      action="store_false", dest="affected", default=True,
                      help=_("show all advisories"))
    parser.add_option("-F", "--no-fetch",
                      action="store_false", dest="fetch", default=True,
                      help=_("don't download PLSA index"))

    (options, args) = parser.parse_args()
    
    # Get locale
    lang = os.environ["LC_ALL"].split("_")[0]

    # Show package details in --long
    if options.long:
        options.packages = True

    # Create work directory
    if not os.access("/tmp/plsa", os.F_OK):
        os.mkdir("/tmp/plsa")

    # Init PISI API
    pisi.api.init(database=True, comar=False, write=False)

    # Get installed packages
    installed_packages = {}
    for package in ctx.installdb.list_installed():
        # Release comparison seems enough
        installed_packages[package] = int(ctx.installdb.get_version(package)[1])

    # List of orphaned packages
    orphaned = []

    # Get list of repositories
    plsas = {}
    for repo in ctx.repodb.list():
        uri = ctx.repodb.get_repo(repo).indexuri.get_uri()
        plsafile = "%s/plsa-index.xml.bz2" % uri[0:uri.rfind("/")]
        tmpfile = "/tmp/plsa/%s.xml" % repo

        if options.fetch:
            print _("Downloading PLSA database of %s") % repo
            try:
                fetch_url(plsafile, "/tmp/plsa", progress=ctx.ui.Progress)
            except FetchError, e:
                print _("Unable to download %s: %s") % (plsafile, e)
                continue

            print _("Checking file integrity of %s") % repo
            try:
                fetch_url("%s.sha1sum" % plsafile, "/tmp/plsa")
            except FetchError, e:
                print _("Unable to download checksum of %s") % repo
                continue

            orig_sha1sum = file("%s.sha1sum" % plsafile).readlines()[0].split()[0]
            if sha1_file(plsafile) != orig_sha1sum:
                print _("File integrity of %s compromised.") % plsafile
                continue

            print _("Unpacking PLSA database of %s") % repo
            try:
                File.decompress("/tmp/plsa/plsa-index.xml.bz2", File.bz2)
            except:
                print _("Unable to decompress %s") % plsafile
                continue
            
            os.rename("/tmp/plsa/plsa-index.xml", tmpfile)
            os.unlink("/tmp/plsa/plsa-index.xml.bz2")
            plsas[repo] = tmpfile
Example #32
def add_package(params):
    try:
        path, deltas, repo_uri = params

        ctx.ui.info(
            "%-80.80s\r" %
            (_('Adding package to index: %s') % os.path.basename(path)),
            noln=True)

        package = pisi.package.Package(path, 'r')
        md = package.get_metadata()
        md.package.packageSize = long(os.path.getsize(path))
        md.package.packageHash = util.sha1_file(path)
        if ctx.config.options and ctx.config.options.absolute_urls:
            md.package.packageURI = os.path.realpath(path)
        else:
            md.package.packageURI = util.removepathprefix(repo_uri, path)

        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.info("")
            ctx.ui.error(
                _('Package %s: metadata corrupt, skipping...') %
                md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            # No need to carry these with index (#3965)
            md.package.files = None
            md.package.additionalFiles = None

            if md.package.name in deltas:
                name, version, release, distro_id, arch = \
                        util.split_package_filename(path)

                for delta_path in deltas[md.package.name]:
                    src_release, dst_release, delta_distro_id, delta_arch = \
                            util.split_delta_package_filename(delta_path)[1:]

                    # Add only delta to latest build of the package
                    if dst_release != md.package.release or \
                            (delta_distro_id, delta_arch) != (distro_id, arch):
                        continue

                    delta = metadata.Delta()
                    delta.packageURI = util.removepathprefix(
                        repo_uri, delta_path)
                    delta.packageSize = long(os.path.getsize(delta_path))
                    delta.packageHash = util.sha1_file(delta_path)
                    delta.releaseFrom = src_release

                    md.package.deltaPackages.append(delta)

        return md.package

    except KeyboardInterrupt:
        # Handle KeyboardInterrupt exception to prevent ugly backtrace of all
        # worker processes and propagate the exception to main process.
        #
        # Probably it's better to use just 'raise' here, but multiprocessing
        # module has some bugs about that: (python#8296, python#9205 and
        # python#9207 )
        #
        # For now, worker processes do not propagate exceptions other than
        # Exception (like KeyboardInterrupt), so we have to manually propagate
        # KeyboardInterrupt exception as an Exception.

        raise Exception