Example #1
    def test_opening_raw_data_as_pickled(self):
        "Should return None, opening raw data as pickled"
        rhnCache.set(self.key, "12345", raw=1)

        self.assertEqual(None, rhnCache.get(self.key, raw=0))

        self._cleanup(self.key)
Example #2
    def test_opening_uncompressed_data_as_compressed(self):
        "Should return None, opening uncompressed data as compressed"
        rhnCache.set(self.key, self.content, raw=1)

        self.assertEqual(None, rhnCache.get(self.key, compressed=1, raw=1))

        self._cleanup(self.key)
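These two tests pin down how rhnCache.get fails on mismatched modifiers: a value stored with raw=1 is written verbatim, so reading it back as a pickle (raw=0) or as gzip data (compressed=1) returns None instead of raising. A minimal sketch of that contract, assuming a writable rhnCache.CACHEDIR and a hypothetical key:

    from spacewalk.common import rhnCache

    rhnCache.set("demo/key", "plain text", raw=1)
    # Same modifiers on read as on write: cache hit
    assert rhnCache.get("demo/key", raw=1) == "plain text"
    # Raw data is not a valid pickle: None rather than an exception
    assert rhnCache.get("demo/key", raw=0) is None
    # Raw data is not gzip-compressed either
    assert rhnCache.get("demo/key", compressed=1, raw=1) is None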
Example #3
    def cache_get(self, object_id, timestamp=None):
        # Get the key
        key = self._get_key(object_id)
        return rhnCache.get(key,
                            modified=timestamp,
                            raw=0,
                            compressed=self._compressed)
Example #4
def get(name, modified=None, raw=None, compressed=None):
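    # Overall flow: the database row is the source of truth; the filesystem
    # cache (rhnCache) is consulted first for this exact version, and is
    # re-primed from the database LOB on a filesystem miss.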
    # Check to see if the entry is in the database, with the right version
    h = _fetch_cursor(key=name, modified=modified)

    row = h.fetchone_dict()

    if not row:
        # Key not found
        return None

    if modified and row['delta'] != 0:
        # Different version
        log_debug(4, "database cache: different version")
        return None

    if modified is None:
        # The caller doesn't care about the modified time, but we do, since we
        # want to fetch the same version from the disk cache
        modified = row['modified']

    if rhnCache.has_key(name, modified):
        # We have the value
        log_debug(4, "Filesystem cache HIT")
        return rhnCache.get(name, modified=modified, raw=raw)

    log_debug(4, "Filesystem cache MISS")

    # The disk cache doesn't have this key at all, or it's a modified value
    # Fetch the value from the database

    v = row['value']
    # Update the accessed field
    rhnSQL.Procedure("rhn_cache_update_accessed")(name)

    if compressed:
        io = cStringIO.StringIO()

        io.write(rhnSQL.read_lob(v))
        io.seek(0, 0)

        # XXX For about 40M of compressed data sometimes we get:
        # zlib.error: Error -3 while decompressing: incomplete dynamic bit lengths tree
        v = gzip.GzipFile(None, "r", 0, io)

    try:
        data = v.read()
    except (ValueError, IOError, gzip.zlib.error) as e:
        # XXX poking at gzip.zlib may not be that well-advised
        log_error("rhnDatabaseCache: gzip error for key %s: %s" % (name, e))
        # Ignore this entry in the database cache, it has invalid data
        return None

    # We store the data in the database cache, in raw format
    rhnCache.set(name, data, modified=modified, raw=1)

    # Unpickle the data, unless raw access was requested
    if not raw:
        return cPickle.loads(data)

    return data
Example #5
    def cache_get(self, params):
        log_debug(4, params)
        key = self._get_key(params)
        last_modified = self._get_last_modified(params)
        if not self.use_database_cache:
            return rhnCache.get(key, modified=last_modified, raw=1)
        return rhnDatabaseCache.get(key, modified=last_modified, raw=1,
                                    compressed=1)
Example #6
    def dump_channel_packages_short(
        self, channel_label, last_modified, filepath=None, validate_channels=False, send_headers=False, open_stream=True
    ):
        log_debug(2, channel_label)
        if validate_channels:
            channels = self._validate_channels(channel_labels=[channel_label])
            channel_obj = channels[channel_label]
        else:
            channels = channel_label
            channel_obj = channels
        db_last_modified = int(rhnLib.timestamp(channel_obj["last_modified"]))
        last_modified = int(rhnLib.timestamp(last_modified))
        log_debug(3, "last modified", last_modified, "db last modified", db_last_modified)
        if last_modified != db_last_modified:
            raise rhnFault(3013, "The requested channel version does not match" " the upstream version", explain=0)
        channel_id = channel_obj["channel_id"]
        if filepath:
            key = filepath
        else:
            key = "xml-channel-packages/rhn-channel-%d.data" % channel_id
        # Try to get everything off of the cache
        val = rhnCache.get(key, compressed=0, raw=1, modified=last_modified)
        if val is None:
            # Not generated yet
            log_debug(4, "Cache MISS for %s (%s)" % (channel_label, channel_id))
            stream = self._cache_channel_packages_short(channel_id, key, last_modified)
        else:
            log_debug(4, "Cache HIT for %s (%s)" % (channel_label, channel_id))
            temp_stream = tempfile.TemporaryFile()
            temp_stream.write(val)
            temp_stream.flush()
            stream = self._normalize_compressed_stream(temp_stream)

        # Copy the results to the output stream
        # They should already be compressed if compression was requested
        buffer_size = 16384
        # Send the HTTP headers - but don't init the compressed stream since
        # we send the data ourselves
        if send_headers:
            self._send_headers(init_compressed_stream=0)
        if open_stream:
            self._raw_stream = open(key, "w")
        while 1:
            buff = stream.read(buffer_size)
            if not buff:
                break
            try:
                self._raw_stream.write(buff)
            except IOError:
                log_error("Client disconnected prematurely")
                self.close()
                raise_with_tb(ClosedConnectionError, sys.exc_info()[2])
        # We're done
        if open_stream:
            self._raw_stream.close()
        return 0
Example #7
    def _test(self, key, content, **modifiers):
        # Blow it away
        rhnCache.CACHEDIR = '/tmp/rhn'
        self._cleanup(key)
        rhnCache.set(key, content, **modifiers)
        self.failUnless(rhnCache.has_key(key))
        content2 = rhnCache.get(key, **modifiers)
        self.assertEqual(content, content2)

        self._cleanup(key)
        self.failIf(rhnCache.has_key(key))
        return (key, content)
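The helper keeps writes and reads symmetric: the same **modifiers passed to rhnCache.set must be repeated for rhnCache.get for the round trip to compare equal. A hypothetical driver for the helper above (keys and values are illustrative):

    # Pickled by default; raw and compressed variants take the same path.
    self._test("users/1/pickled", {"a": 1})
    self._test("users/1/raw", "raw string", raw=1)
    self._test("users/1/gz", "raw string", raw=1, compressed=1)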
Example #8
    def no_test_as_streams_1(self):
        "Tests storing and retrieval as streams"
        t = tempfile.TemporaryFile()
        content = self.content * 100
        t.write(content)
        t.seek(0, 0)

        self._cleanup(self.key)
        rhnCache.set(self.key, None, raw=1, stream=t)
        self.failUnless(rhnCache.has_key(self.key))

        ss = rhnCache.get(self.key, as_stream=1)
        self.failUnless(hasattr(ss, "read"))
        content2 = ss.read()

        self.assertEquals(content, content2)
        self._cleanup(self.key)
Example #9
    def _getHeaderFromFile(self, filePath, stat_info=None):
        """ Wraps around common.rhnRepository's method, adding a caching layer
        If stat_info was already passed, don't re-stat the file
        """
        log_debug(3, filePath)
        if not CFG.CACHE_PACKAGE_HEADERS:
            return rhnRepository.Repository._getHeaderFromFile(
                self, filePath, stat_info=stat_info)
        # Ignore stat_info for now - nobody sets it anyway
        stat_info = None
        try:
            stat_info = os.stat(filePath)
        except:
            raise_with_tb(
                rhnFault(
                    17,
                    "Unable to read package %s" % os.path.basename(filePath)),
                sys.exc_info()[2])
        lastModified = stat_info[stat.ST_MTIME]

        # OK, file exists, check the cache
        cache_key = os.path.normpath("headers/" + filePath)
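        # The file's mtime doubles as the cache version: a header cached
        # under a different mtime counts as a miss and is regenerated below.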
        header = rhnCache.get(cache_key,
                              modified=lastModified,
                              raw=1,
                              compressed=1)
        if header:
            # We're good to go
            log_debug(2, "Header cache HIT for %s" % filePath)
            extra_headers = {
                'X-RHN-Package-Header': os.path.basename(filePath),
            }
            self._set_last_modified(lastModified, extra_headers=extra_headers)
            return header
        log_debug(3, "Header cache MISS for %s" % filePath)
        header = rhnRepository.Repository._getHeaderFromFile(
            self, filePath, stat_info=stat_info)
        if header:
            rhnCache.set(cache_key,
                         header,
                         modified=lastModified,
                         raw=1,
                         compressed=1)
        return header
Example #10
    def test_missing_1(self):
        "A missing key should return None"
        self._cleanup(self.key)
        self.assertEqual(None, rhnCache.get(self.key))
Example #11
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
from spacewalk.common import rhnCache

key = "/var/goo/goo"

data = "0123456789" * 1024 * 1024

rhnCache.set(key, data, compressed=1, raw=1)
assert data == rhnCache.get(key, compressed=1, raw=1)

rhnCache.set(key, "12345", raw=1)
# Should return None, opening uncompressed data as compressed
assert None == rhnCache.get(key, compressed=1, raw=1)

# Should return None, opening raw data as pickled
assert None == rhnCache.get(key, raw=0)
Example #12
    def poll_packages(self, release, server_arch, timestamp=0, uuid=None):
        log_debug(1, release, server_arch, timestamp, uuid)

        # make sure we're dealing with strings here
        release = str(release)
        server_arch = rhnLib.normalize_server_arch(server_arch)
        timestamp = str(timestamp)
        uuid = str(uuid)

        # get a list of acceptable channels
        channel_list = []

        channel_list = rhnChannel.applet_channels_for_uuid(uuid)

        # it's possible the tie between uuid and rhnServer.id wasn't yet
        # made, default to normal behavior
        if not channel_list:
            channel_list = rhnChannel.get_channel_for_release_arch(
                release, server_arch)
            channel_list = [channel_list]
        # bork if no channels returned
        if not channel_list:
            log_debug(
                8, "No channels for release = '%s', arch = '%s', uuid = '%s'" %
                (release, server_arch, uuid))
            return {'last_modified': 0, 'contents': []}

        last_channel_changed_ts = max(
            [a["last_modified"] for a in channel_list])

        # make satellite content override a cache caused by hosted
        last_channel_changed_ts = str(LongType(last_channel_changed_ts) + 1)

        # gotta be careful about channel unsubscriptions...
        client_cache_invalidated = None

        # we return rhnServer.channels_changed for each row
        # in the satellite case, pluck it off the first...
        if "server_channels_changed" in channel_list[0]:
            sc_ts = channel_list[0]["server_channels_changed"]

            if sc_ts and (sc_ts >= last_channel_changed_ts):
                client_cache_invalidated = 1

        if (last_channel_changed_ts <=
                timestamp) and (not client_cache_invalidated):
            # XXX: I hate these freaking return codes that return
            # different members in the dictionary depending on what
            # sort of data you get
            log_debug(3, "Client has current data")
            return {'use_cached_copy': 1}

        # we'll have to return something big - compress
        rhnFlags.set("compress_response", 1)

        # Mark the response as being already XMLRPC-encoded
        rhnFlags.set("XMLRPC-Encoded-Response", 1)

        # next, check the cache if we have something with this timestamp
        label_list = [str(a["id"]) for a in channel_list]
        label_list.sort()
        log_debug(4, "label_list", label_list)
        cache_key = "applet-poll-%s" % string.join(label_list, "-")

        ret = rhnCache.get(cache_key, last_channel_changed_ts)
        if ret:  # we have a good entry with matching timestamp
            log_debug(3, "Cache HIT for", cache_key)
            return ret

        # damn, need to do some real work from chip's requirements:
        # The package list should be an array of hashes with the keys
        # nvre, name, version, release, epoch, errata_advisory,
        # errata_id, with the errata fields being empty strings if the
        # package isn't from an errata.
        ret = {'last_modified': last_channel_changed_ts, 'contents': []}

        # we search for packages only in the allowed channels - build
        # the SQL helper string and dictionary to make the foo IN (
        # list ) constructs use bind variables
        qlist = []
        qdict = {}
        for c in channel_list:
            v = c["id"]
            k = "channel_%s" % v
            qlist.append(":%s" % k)
            qdict[k] = v
        qlist = string.join(qlist, ", ")

        # This query is kind of big. One of these days I'm gonna start
        # pulling them out and transforming them into views. We can
        # also simulate this using several functions exposed out of
        # rhnChannel, but there is no difference in speed because we
        # need to do more than one query; besides, we cache the hell
        # out of it
        h = rhnSQL.prepare("""
        select distinct
            pn.name,
            pe.version,
            pe.release,
            pe.epoch,
            e_sq.errata_advisory,
            e_sq.errata_synopsis,
            e_sq.errata_id
        from
            rhnPackageName pn,
            rhnPackageEVR pe,
            rhnChannelNewestPackage cnp
        left join
            (   select  sq_e.id as errata_id,
                        sq_e.synopsis as errata_synopsis,
                        sq_e.advisory as errata_advisory,
                        sq_ep.package_id
                from
                        rhnErrata sq_e,
                        rhnErrataPackage sq_ep,
                        rhnChannelErrata sq_ce
                where   sq_ce.errata_id = sq_ep.errata_id
                        and sq_ce.errata_id = sq_e.id
                        and sq_ce.channel_id in ( %s )
            ) e_sq
          on cnp.package_id = e_sq.package_id
        where
            cnp.channel_id in ( %s )
        and cnp.name_id = pn.id
        and cnp.evr_id = pe.id
        """ % (qlist, qlist))
        h.execute(**qdict)

        plist = h.fetchall_dict()

        if not plist:
            # We've set XMLRPC-Encoded-Response above
            ret = xmlrpclib.dumps((ret, ), methodresponse=1)
            return ret

        contents = {}

        for p in plist:
            for k in list(p.keys()):
                if p[k] is None:
                    p[k] = ""
            p["nevr"] = "%s-%s-%s:%s" % (p["name"], p["version"], p["release"],
                                         p["epoch"])
            p["nvr"] = "%s-%s-%s" % (p["name"], p["version"], p["release"])

            pkg_name = p["name"]

            if pkg_name in contents:
                stored_pkg = contents[pkg_name]

                s = [
                    stored_pkg["name"], stored_pkg["version"],
                    stored_pkg["release"], stored_pkg["epoch"]
                ]

                n = [p["name"], p["version"], p["release"], p["epoch"]]

                log_debug(7, "comparing vres", s, n)
                if rhn_rpm.nvre_compare(s, n) < 0:
                    log_debug(7, "replacing %s with %s" % (pkg_name, p))
                    contents[pkg_name] = p
                else:
                    # already have a higher vre stored...
                    pass
            else:
                log_debug(7, "initial store for %s" % pkg_name)
                contents[pkg_name] = p

        ret["contents"] = list(contents.values())

        # save it in the cache
        # We've set XMLRPC-Encoded-Response above
        ret = xmlrpclib.dumps((ret, ), methodresponse=1)
        rhnCache.set(cache_key, ret, last_channel_changed_ts)

        return ret
Example #13
    def cache_get(self, params):
        log_debug(4, params)
        key = self._get_key(params)
        last_modified = self._get_last_modified(params)
        return rhnCache.get(key, modified=last_modified, raw=1)
Example #14
    def __getitem__(self, key):
        rkey = self._compute_key(key)
        # We want a dictionary-like behaviour, so if the key is not present,
        # raise an exception (that's what missing_is_null=0 does)
        val = rhnCache.get(rkey, missing_is_null=0)
        return val
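A short sketch of the missing_is_null contract the wrapper relies on; the source only says an exception is raised for an absent key, so the KeyError below is an assumption made for dictionary-like symmetry:

    # Presumed behaviour: missing_is_null=0 turns "key not found" into an
    # exception (assumed here to be KeyError) instead of returning None.
    try:
        rhnCache.get("no/such/key", missing_is_null=0)
    except KeyError:
        pass  # an absent key behaves like a missing dict entry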
Example #15
    def __init__(self,
                 channel_label,
                 repo_type,
                 url=None,
                 fail=False,
                 filters=None,
                 no_errata=False,
                 sync_kickstart=False,
                 latest=False,
                 metadata_only=False,
                 strict=0,
                 excluded_urls=None,
                 no_packages=False,
                 log_dir="reposync",
                 log_level=None,
                 force_kickstart=False,
                 force_all_errata=False,
                 check_ssl_dates=False,
                 force_null_org_content=False):
        self.regen = False
        self.fail = fail
        self.filters = filters or []
        self.no_packages = no_packages
        self.no_errata = no_errata
        self.sync_kickstart = sync_kickstart
        self.force_all_errata = force_all_errata
        self.force_kickstart = force_kickstart
        self.latest = latest
        self.metadata_only = metadata_only
        self.ks_tree_type = 'externally-managed'
        self.ks_install_type = None

        initCFG('server.satellite')
        rhnSQL.initDB()

        # setup logging
        log_filename = channel_label + '.log'
        log_path = default_log_location + log_dir + '/' + log_filename
        if log_level is None:
            log_level = 0
        CFG.set('DEBUG', log_level)
        rhnLog.initLOG(log_path, log_level)
        # os.fchown isn't in 2.4 :/
        if isSUSE():
            os.system("chgrp www " + log_path)
        else:
            os.system("chgrp apache " + log_path)

        log2disk(0, "Command: %s" % str(sys.argv))
        log2disk(0, "Sync of channel started.")

        self.channel_label = channel_label
        self.channel = self.load_channel()
        if not self.channel:
            log(0, "Channel %s does not exist." % channel_label)

        if not self.channel['org_id'] or force_null_org_content:
            self.org_id = None
        else:
            self.org_id = int(self.channel['org_id'])

        if not url:
            # TODO:need to look at user security across orgs
            h = rhnSQL.prepare("""select s.id, s.source_url, s.label
                                  from rhnContentSource s,
                                       rhnChannelContentSource cs
                                 where s.id = cs.source_id
                                   and cs.channel_id = :channel_id""")
            h.execute(channel_id=int(self.channel['id']))
            source_data = h.fetchall_dict()
            self.urls = []
            if excluded_urls is None:
                excluded_urls = []
            if source_data:
                for row in source_data:
                    if row['source_url'] not in excluded_urls:
                        self.urls.append(
                            (row['id'], row['source_url'], row['label']))
        else:
            self.urls = [(None, u, None) for u in url]

        if not self.urls:
            log2(0,
                 0,
                 "Channel %s has no URL associated" % channel_label,
                 stream=sys.stderr)

        self.repo_plugin = self.load_plugin(repo_type)
        self.strict = strict
        self.all_packages = []
        self.check_ssl_dates = check_ssl_dates
        # Init the cache of computed checksums, so they are not recomputed on every reposync run
        self.checksum_cache = rhnCache.get(checksum_cache_filename)
        if self.checksum_cache is None:
            self.checksum_cache = {}
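The read above would pair with a write-back at the end of a sync run; a hypothetical sketch, reusing the same checksum_cache_filename key (the set() call pickles the dict, matching the non-raw get()):

    # Hypothetical companion write-back: persist the dict under the same
    # key so the next reposync run starts with warm checksums.
    rhnCache.set(checksum_cache_filename, self.checksum_cache)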