Example #1
    def test_timeout(self):
        timeout = 3
        sleep_time = 6

        def reset_timeout(timeout=fetcher.SOCKETTIMEOUT):
            fetcher.SOCKETTIMEOUT = timeout

        reset_timeout(timeout)
        self.addCleanup(reset_timeout)

        headers = []

        def handler(request):
            time.sleep(sleep_time)
            request.send_error(404, "After timeout sleep")
            request.send_header("Content-Length", "6")
            request.wfile.write("Hello!")

        started = time.time()

        # We hide errors here because we know we'll get a broken pipe on
        # the server side if the test succeeds.
        self.start_server(handler, hide_errors=True)
        self.fetcher.enqueue(URL)
        self.fetcher.run(progress=Progress())
        self.assertTrue(timeout <= (time.time() - started) < sleep_time - 1)

        item = self.fetcher.getItem(URL)
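The reset_timeout() helper above leans on the fact that default argument values are evaluated once, at function definition time: the current fetcher.SOCKETTIMEOUT is captured as the default, so registering reset_timeout (called with no arguments) via addCleanup restores the original value after the test. A minimal, self-contained illustration of the same save-and-restore idiom, with made-up names, is:

    SETTING = 10              # stand-in for a module attribute such as SOCKETTIMEOUT

    def reset_setting(value=SETTING):
        # The default is bound at definition time, so it still holds 10 here.
        global SETTING
        SETTING = value

    reset_setting(3)          # override for the duration of a test
    assert SETTING == 3
    reset_setting()           # no argument: restores the captured original
    assert SETTING == 10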
Example #2
    def __init__(self, fetcher, channels=None, progress=None):
        self._channels = channels or []
        self._fetcher = fetcher
        self._progress = progress or Progress()
        self._available = {}
        self._media = {}
        if self._channels:
            self.compute()
Example #3
    def test_user_agent(self):
        headers = []

        def handler(request):
            headers[:] = request.headers.headers

        self.start_server(handler)
        self.fetcher.enqueue(URL)
        self.fetcher.run(progress=Progress())
        self.assertTrue(("User-Agent: smart/%s\r\n" % VERSION) in headers)
Example #4
    def setUp(self):
        self.progress = Progress()
        self.fetcher = Fetcher()
        self.cache = Cache()

        self.download_dir = self.makeDir()
        self.fetcher.setLocalPathPrefix(self.download_dir + "/")

        # Disable caching so that things blow up when not found.
        self.fetcher.setCaching(NEVER)

        sysconf.set("deb-arch", "i386")
Example #5
    def setUp(self):
        self.progress = Progress()
        self.fetcher = Fetcher()
        self.cache = Cache()

        self.download_dir = self.makeDir()
        self.fetcher.setLocalPathPrefix(self.download_dir + "/")

        # Disable caching so that things blow up when not found.
        self.fetcher.setCaching(NEVER)

        sysconf.set("deb-arch", "i386")
        self.trustdb = open("%s/aptdeb/trustdb.gpg" % TESTDATADIR).read()
Example #6
    def setUp(self):
        self.progress = Progress()
        self.fetcher = Fetcher()
        self.cache = Cache()

        self.download_dir = self.makeDir()
        self.fetcher.setLocalPathPrefix(self.download_dir + "/")

        # Disable caching so that things blow up when not found.
        self.fetcher.setCaching(NEVER)

        # Make sure to trigger old bugs in debug error reporting.
        sysconf.set("log-level", DEBUG)
Example #7
    def test_404_handling(self):
        headers = []

        def handler(request):
            request.send_error(404, "An expected error")
            request.send_header("Content-Length", "6")
            request.wfile.write("Hello!")

        self.start_server(handler)
        self.fetcher.enqueue(URL)
        self.fetcher.run(progress=Progress())
        item = self.fetcher.getItem(URL)
        self.assertEquals(item.getFailedReason(), u"File not found")
Example #8
    def test_401_handling(self):
        headers = []

        def handler(request):
            request.send_error(401, "Authorization Required")
            request.send_header("Content-Length", "17")
            request.wfile.write("401 Unauthorized.")

        self.start_server(handler)
        self.fetcher.enqueue(URL)
        self.fetcher.run(progress=Progress())
        item = self.fetcher.getItem(URL)
        self.assertEquals(item.getStatus(), FAILED)
Example #9
    def test_ratelimit(self):
        bytes = 30
        rate_limit = 10

        sysconf.set("max-download-rate", rate_limit, soft=True)

        def handler(request):
            request.send_header("Content-Length", str(bytes))
            request.wfile.write(" " * bytes)

        self.start_server(handler)
        self.fetcher.enqueue(URL)
        start = time.time()
        self.fetcher.run(progress=Progress())
        stop = time.time()
        elapsed_time = stop - start

        self.assertTrue(elapsed_time >= bytes / rate_limit)
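The assertion boils down to simple arithmetic: 30 bytes capped at 10 bytes per second cannot finish in under 3 seconds (bytes / rate_limit is integer division here, but the bound is the same). A naive sleep-based throttle of the kind this test exercises, not smart's actual downloader code, could look like:

    import time

    def throttled_write(out, data, rate_limit, chunk=10):
        # Send the data in small chunks, sleeping after each one so the
        # average rate stays at or below rate_limit bytes per second.
        for i in range(0, len(data), chunk):
            out.write(data[i:i + chunk])
            time.sleep(float(chunk) / rate_limit)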
Example #10
    def setUp(self):
        self.channel = createChannel(
            "alias", {
                "type": "apt-deb",
                "baseurl": "file://%s/deb" % TESTDATADIR,
                "distribution": "./"
            })

        class TestInterface(Interface):
            output = []

            def message(self, level, msg):
                self.output.append((level, msg))

            def showOutput(self, data):
                self.output.append(data)

        self.iface = TestInterface(ctrl)

        self.progress = Progress()
        self.fetcher = Fetcher()
        self.cache = Cache()

        self.channel.fetch(self.fetcher, self.progress)
        self.loader = self.channel.getLoaders()[0]
        self.cache.addLoader(self.loader)

        self.old_iface = iface.object
        self.old_sysconf = pickle.dumps(sysconf.object)

        iface.object = self.iface

        self.cache.load()

        self.pm = DebPackageManager()

        # Skip the test if dpkg is unavailable.
        dpkg = sysconf.get("dpkg", "dpkg")
        output = tempfile.TemporaryFile()
        status = self.pm.dpkg([dpkg, "--version"], output)
        if not os.WIFEXITED(status) or os.WEXITSTATUS(status) != 0:
            if not hasattr(self, 'skipTest'):  # unittest before Python 2.7
                self.skipTest = self.fail      # fall back to failing the test
            self.skipTest("%s not found" % dpkg)
Example #11
    def search(self, searcher):
        offsets = {}
        for pkg in self._packages:
            offsets[pkg.loaders[self]] = pkg

        for section, offset in self.getSections(Progress()):
            pkg = offsets.get(offset)
            if not pkg:
                continue

            ratio = 0
            if searcher.group:
                group = self._sections[pkg]
                for pat in searcher.group:
                    if pat.search(group):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue

            if searcher.summary or searcher.description:
                toks = section.get("description", "").split("\n", 1)
                if len(toks) == 2:
                    summary, description = toks
                else:
                    summary, description = toks[0], ""

            if searcher.summary:
                for pat in searcher.summary:
                    if pat.search(summary):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.description:
                for pat in searcher.description:
                    if pat.search(description):
                        ratio = 1
                        break
            if ratio:
                searcher.addResult(pkg, ratio)
Example #12
    def test_remove_pragma_no_cache_from_curl(self):
        fetcher.enablePycurl()
        headers = []

        def handler(request):
            headers[:] = request.headers.headers

        self.start_server(handler)
        old_http_proxy = os.environ.get("http_proxy")
        os.environ["http_proxy"] = URL
        try:
            self.fetcher.enqueue(URL)
            self.fetcher.run(progress=Progress())
        finally:
            if old_http_proxy:
                os.environ["http_proxy"] = old_http_proxy
            else:
                del os.environ["http_proxy"]
        self.assertTrue("Pragma: no-cache\r\n" not in headers)
Example #13
    def reloadChannels(self, channels=None, caching=ALWAYS):

        if channels is None:
            manual = False
            self.rebuildSysConfChannels()
            self.rebuildDynamicChannels()
            channels = self._channels.values()
            hooks.call("reload-channels", channels)
        else:
            manual = True

        # Get channels directory and check the necessary locks.
        channelsdir = os.path.join(sysconf.get("data-dir"), "channels/")
        userchannelsdir = os.path.join(sysconf.get("user-data-dir"),
                                       "channels/")
        if not os.path.isdir(channelsdir):
            try:
                os.makedirs(channelsdir)
            except OSError:
                raise Error, _("Unable to create channel directory.")
        if caching is ALWAYS:
            if sysconf.getReadOnly() and os.access(channelsdir, os.W_OK):
                iface.warning(
                    _("The Smart library is already in use by "
                      "another process."))
                iface.warning(_("Configuration is in readonly mode!"))
            if not self._pathlocks.lock(channelsdir):
                raise Error, _("Channel information is locked for writing.")
        elif sysconf.getReadOnly():
            raise Error, _("Can't update channels in readonly mode.")
        elif not self._pathlocks.lock(channelsdir, exclusive=True):
            raise Error, _("Can't update channels with active readers.")
        self._fetcher.setLocalDir(channelsdir, mangle=True)

        # Prepare progress. If we're reading from the cache, we don't want
        # too much information being shown. Otherwise, ask for a full-blown
        # progress for the interface, and build information of currently
        # available packages to compare later.
        if caching is ALWAYS:
            progress = Progress()
        else:
            progress = iface.getProgress(self._fetcher, True)
            oldpkgs = {}
            for pkg in self._cache.getPackages():
                oldpkgs[(pkg.name, pkg.version)] = True
        progress.start()
        steps = 0
        for channel in channels:
            steps += channel.getFetchSteps()
        progress.set(0, steps)

        # Rebuild mirror information.
        self.reloadMirrors()

        self._fetcher.setForceMountedCopy(True)

        self._cache.reset()

        # Do the real work.
        result = True
        for channel in channels:
            digest = channel.getDigest()
            if not manual and channel.hasManualUpdate():
                self._fetcher.setCaching(ALWAYS)
            else:
                self._fetcher.setCaching(caching)
                if channel.getFetchSteps() > 0:
                    progress.setTopic(
                        _("Fetching information for '%s'...") %
                        (channel.getName() or channel.getAlias()))
                    progress.show()
            self._fetcher.setForceCopy(channel.isRemovable())
            self._fetcher.setLocalPathPrefix(channel.getAlias() + "%%")
            try:
                if not channel.fetch(self._fetcher, progress):
                    iface.debug(_("Failed fetching channel '%s'") % channel)
                    result = False
            except Error, e:
                iface.error(unicode(e))
                iface.debug(_("Failed fetching channel '%s'") % channel)
                result = False
            if (channel.getDigest() != digest
                    and isinstance(channel, PackageChannel)):
                channel.addLoaders(self._cache)
                if channel.getAlias() in self._sysconfchannels:
                    self._cachechanged = True
Example #14
    def search(self, searcher):
        for h, offset in self.getHeaders(Progress()):
            pkg = self._offsets.get(offset)
            if not pkg:
                continue

            ratio = 0
            if searcher.url:
                refurl = h[rpm.RPMTAG_URL]
                if refurl:
                    for url, cutoff in searcher.url:
                        _, newratio = globdistance(url, refurl, cutoff)
                        if newratio > ratio:
                            ratio = newratio
                            if ratio == 1:
                                break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.path:
                paths = h[rpm.RPMTAG_OLDFILENAMES]
                if paths:
                    for spath, cutoff in searcher.path:
                        for path in paths:
                            _, newratio = globdistance(spath, path, cutoff)
                            if newratio > ratio:
                                ratio = newratio
                                if ratio == 1:
                                    break
                        else:
                            continue
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.group:
                group = self._groups[pkg]
                for pat in searcher.group:
                    if pat.search(group):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.summary:
                summary = h[rpm.RPMTAG_SUMMARY]
                for pat in searcher.summary:
                    if pat.search(summary):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.description:
                description = h[rpm.RPMTAG_DESCRIPTION]
                for pat in searcher.description:
                    if pat.search(description):
                        ratio = 1
                        break
            if ratio:
                searcher.addResult(pkg, ratio)
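Both search() implementations (Examples #11 and #14) drive a duck-typed searcher object; only the attributes they read and the single addResult() call are visible here. A skeletal stand-in built from just that surface, assuming compiled regular expressions for the summary, description and group lists, might be (this is not smart's real Searcher class):

    import re

    class FakeSearcher(object):
        # Only what the search() methods above actually touch.
        def __init__(self, summary=None, description=None, group=None):
            self.summary = [re.compile(p) for p in summary or []]
            self.description = [re.compile(p) for p in description or []]
            self.group = [re.compile(p) for p in group or []]
            self.url = []     # Example #14 expects (url, cutoff) pairs here
            self.path = []    # likewise (path pattern, cutoff) pairs
            self.results = []

        def addResult(self, pkg, ratio):
            self.results.append((pkg, ratio))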
Example #15
    def getSubProgress(self, obj):
        if not hasattr(self, "_progress"):
            self._progress = Progress()
        return self._progress
Example #16
    def getProgress(self, obj, hassub=False):
        if not hasattr(self, "_progress"):
            self._progress = Progress()
        return self._progress
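Most of the snippets only ever construct Progress() and pass it to run(); the richer calls (start(), set(), setTopic(), show()) appear in Example #13. A bare-bones stand-in exposing just that surface, useful for reading the examples outside the smart source tree, might look like the sketch below; it is not smart's actual Progress class:

    class FakeProgress(object):
        # Minimal stand-in with only the calls used in the examples above.
        def start(self):
            self.current, self.total, self.topic = 0, 0, ""

        def set(self, current, total):
            self.current, self.total = current, total

        def setTopic(self, topic):
            self.topic = topic

        def show(self):
            pass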