Example #1
0
 def SaveData(self, filename):
     """Snapshot scheduler state to *filename* atomically via pickle.

     The state is dumped to ``filename + ".tmp"`` first and renamed over
     the target only if the dump succeeded, so a failed backup can never
     clobber the previous good file (previously the rename ran even on
     failure, and raised NameError if the failure preceded the tmp-name
     assignment).  The queue is frozen for the duration of the dump.
     Optionally logs heap/GC statistics afterwards.
     """
     gc.collect()
     self.Freeze()
     tmpFilename = filename + ".tmp"
     try:
         # Shallow-copy the queues so concurrent mutation during the
         # dump cannot corrupt the pickle stream.
         sdict = {"qList": copy.copy(self.qList),
                  "qRef": copy.copy(self.qRef),
                  "tagRef": self.tagRef,
                  "binStorage": self.binStorage,
                  "tempStorage": self.tempStorage,
                  "schedWatcher": self.schedWatcher,
                  "connManager": self.connManager}
         # Protocol 2 is a binary format: the file must be opened "wb"
         # (the old text-mode "w" corrupts the stream on Windows/Py3).
         with open(tmpFilename, "wb") as backup_printer:
             pickler = Pickler(backup_printer, 2)
             pickler.dump(sdict)
         # Atomic replace (on POSIX) only after a complete dump.
         os.rename(tmpFilename, filename)
     except Exception:
         logging.exception("Backup error")
     finally:
         self.UnFreeze()
     if self.context.useMemProfiler:
         try:
             last_heap = self.LastHeap
             self.LastHeap = self.HpyInstance.heap()
             heapsDiff = self.LastHeap.diff(last_heap) if last_heap else self.LastHeap
             logging.info("memory changes: %s", heapsDiff)
             logging.debug("GC collecting result %s", gc.collect())
         except Exception as e:
             logging.exception("%s", e)
def deepCopy(obj):
    """Return a deep copy of *obj* by round-tripping it through pickle."""
    buf = StringIO()
    Pickler(buf, 1).dump(obj)
    buf.seek(0)
    return Unpickler(buf).load()
Example #3
0
    def serialize(self, obj, deferred=False):
        """Pickle *obj* to a string.

        :param obj: The object to serialize.
        :param deferred: If true, Deferred objects are replaced by their
            persistent ids and collected for re-binding on
            deserialization; if false, encountering a Deferred raises
            PicklingError.
        :returns: the pickled payload, or ``(payload, args)`` when
            *deferred* is true.
        """
        if deferred:
            args = {}

            def persistent_id(inner):
                # Swap each Deferred for its id; remember the object so
                # it can be re-bound after deserialization.
                if isinstance(inner, Deferred):
                    args[inner.id] = inner
                    return inner.id
                return None
        else:
            args = None

            def persistent_id(inner):
                # Deferred objects are forbidden in this mode.
                if isinstance(inner, Deferred):
                    raise PicklingError('%s cannot be serialized' % inner)
                return None
        buf = StringIO()
        pickler = Pickler(buf, HIGHEST_PROTOCOL)
        pickler.persistent_id = persistent_id
        pickler.dump(obj)
        payload = buf.getvalue()
        if deferred:
            return (payload, args)
        return payload
Example #4
0
 def block_artist(self, artist_name):
     """store artist name and current daytime so songs by that
     artist can be blocked
     """
     self._blocked_artists.append(artist_name)
     self._blocked_artists_times.append(self.now)
     self.log("Blocked artist: %s (%s)" % (
         artist_name,
         len(self._blocked_artists)))
     if self.store_blocked_artists:
         dump = os.path.join(
             self.player_get_userdir(), "autoqueue_block_cache")
         # Drop any stale cache file before rewriting it.
         try:
             os.remove(dump)
         except OSError:
             pass
         # NOTE: the old ``if len(...) == 0: return`` guard was dead
         # code -- the list can never be empty right after an append.
         # Protocol -1 is binary, so the file must be opened "wb" (the
         # old text-mode 'w' corrupts the pickle on Windows/Py3), and
         # ``with`` closes the handle even if the dump raises.
         with open(dump, 'wb') as pickle_file:
             pickler = Pickler(pickle_file, -1)
             to_dump = (self._blocked_artists,
                        self._blocked_artists_times)
             pickler.dump(to_dump)
def generate_episodedata_pickle(episodes):
    """Collect per-episode video metadata and return it pickled.

    For each episode dict (must contain "rtid"), resolves its archive
    page and scrapes either the blip.tv RSS feed or YouTube for video
    data.  Returns a StringIO positioned at 0 containing the pickled
    ``{rtid: epdata}`` mapping (protocol -1 = highest available).
    Progress is written to stderr.
    """
    data = {}
    sys.stderr.write("Processed videos: ")
    for ep in episodes:
        epdata = {}
        epdata.update(ep)
        episode_url = "http://roosterteeth.com/archive/episode.php?id=%i" % ep["rtid"]
        blipid = get_blipid(episode_url)
        # ``is None`` is the correct identity test (``== None`` invokes
        # __eq__ and can misfire on custom types).
        if blipid is None:
            # No blip.tv mirror -- fall back to YouTube.
            youtubeid = get_youtubeid(episode_url)
            if youtubeid is not None:
                # Setup YouTube Service and get data
                youtube_service = gdata.youtube.service.YouTubeService()
                video_entry = youtube_service.GetYouTubeVideoEntry(video_id=youtubeid)
                epdata.update(parse_youtube(youtubeid, video_entry))
        else:
            page = urllib2.urlopen("http://blip.tv/rss/flash/%s" % blipid)
            try:
                epdata.update(parse_bliptv(page.read()))
            finally:
                # Release the HTTP connection even if parsing fails.
                page.close()
        data[ep["rtid"]] = epdata
        sys.stderr.write("%s " % ep["rtid"])
    sys.stderr.write("\n")
    picklefile = StringIO()
    pickler = Pickler(picklefile, -1)
    pickler.dump(data)
    picklefile.seek(0)
    return picklefile
Example #6
0
class PickleItemExporter(BaseItemExporter):
    """Item exporter that writes each item as one pickle record to *file*."""

    def __init__(self, file, protocol=0, **kwargs):
        self._configure(kwargs)
        self.pickler = Pickler(file, protocol)

    def export_item(self, item):
        # Flatten the item into a plain dict before pickling.
        fields = dict(self._get_serialized_fields(item))
        self.pickler.dump(fields)
Example #7
0
 def __setitem__(self, key, value):
     """Pickle *value* and store it in the backing dict under *key*."""
     if self.writeback:
         # Keep a live reference so later mutations get written back.
         self.cache[key] = value
     buf = StringIO()
     Pickler(buf, self._protocol).dump(value)
     self.dict[key] = buf.getvalue()
Example #8
0
 def __setitem__(self, key, value):
     """Serialize *value* with the configured protocol and persist it."""
     if self.writeback:
         self.cache[key] = value
     stream = StringIO()
     pickler = Pickler(stream, self._protocol)
     pickler.dump(value)
     pickled = stream.getvalue()
     self.dict[key] = pickled
Example #9
0
class PickleItemExporter(BaseItemExporter):
    """Writes one pickle record per exported item to the given file."""

    def __init__(self, file, protocol=0, **kwargs):
        self._configure(kwargs)
        self.pickler = Pickler(file, protocol)

    def export_item(self, item):
        record = dict(self._get_serialized_fields(item))
        self.pickler.dump(record)
Example #10
0
def _pickled_setitem_(self, key, value):
    """ Add object (pickle if needed)  to dbase
    >>> db['A/B/C'] = obj
    """
    if self.writeback:
        self.cache[key] = value

    # Non-TObject payloads cannot be stored by ROOT directly:
    # pickle, compress, and wrap them into an Ostap.BLOB first.
    if not isinstance(value, ROOT.TObject):
        buf = BytesIO()
        Pickler(buf, self.protocol).dump(value)
        zipped = zlib.compress(buf.getvalue(), self.compresslevel)
        from ostap.core.core import Ostap
        blob = Ostap.BLOB(key)
        status = Ostap.blob_from_bytes(blob, zipped)
        value = blob
        del zipped, buf

    # Store either the original TObject or the wrapped BLOB via ROOT.
    self.dict[key] = value
Example #11
0
 def pickle_self(self):
     """Pickle this object to ``<picklepath>/<ownname>_<label>.txt``.

     Uses ``with`` so the file is closed even if the dump raises (the
     old code leaked the handle on error), and binary mode, which
     pickle streams require on Windows and Python 3.
     """
     target = join(self.p.picklepath,
                   self.ownname + '_' + self.p.label + '.txt')
     with open(target, 'wb') as ofile:
         einmachglas = Pickler(ofile)
         einmachglas.dump(self)
Example #12
0
def save_game(username, save_obj):
    """Pickle *save_obj* to ``<username>.crsf`` and pack the save files.

    The save file is opened in binary mode (pickle data is bytes) and
    closed deterministically via ``with`` even if the dump fails.
    """
    with open(username + '.crsf', 'wb') as f:
        Pickler(f).dump(save_obj)

    pack_files(username)
Example #13
0
def pickle_matrices(matrices, outdir='.'):
    """Pickle each matrix from *matrices* to ``<outdir>/<name>.pickle``.

    :param matrices: mapping of name -> matrix, as output by
        ``create_matrices``.
    :param outdir: destination directory (must already exist).
    """
    # ``items()`` works on both Python 2 and 3; the original
    # ``iteritems()`` is Python-2 only.
    for name, matrix in matrices.items():
        fpath = os.path.join(outdir, name + '.pickle')
        with open(fpath, 'wb') as fh:
            pickler = Pickler(fh, HIGHEST_PROTOCOL)
            pickler.dump(matrix)
Example #14
0
 def __setitem__(self, key, value):
     """Pickle *value*, store it under ``str(key)``, and sync if supported."""
     buf = StringIO()
     Pickler(buf, self.PICKLING_PROTOCOL).dump(value)
     self._dict[str(key)] = buf.getvalue()
     # Flush immediately when the backing store supports it.
     sync = getattr(self._dict, 'sync', None)
     if sync is not None:
         sync()
Example #15
0
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Objects in *ignore_list* are not copied; each is replaced in the
    copy by a placeholder SimpleItem (id "ignored_subobject") that
    removeNonVersionedData() strips later.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        # ``in`` replaces dict.has_key(), which was removed in Python 3.
        if id(ob) in ignore_dict:
            return 'ignored'
        # Force ghost objects to load so their state gets pickled.
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
Example #16
0
 def pickle_results(self):
     """Pickle this participant's sentences and results to
     ``participant_<id>.dat``.

     Uses ``open()`` (the ``file()`` builtin is Python-2 only), binary
     mode as pickle requires, and ``with`` so the handle is closed
     (the old code never closed it).
     """
     experiment = {}
     experiment['test_sentences'] = self.test_sentences
     experiment['results'] = self.results
     with open('participant_%d.dat' % self.participant_id, 'wb') as data:
         Pickler(data).dump(experiment)
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Objects in *ignore_list* are skipped during the copy; each is
    replaced by a placeholder SimpleItem (id "ignored_subobject") to be
    stripped by removeNonVersionedData().
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        # ``in`` replaces dict.has_key(), which was removed in Python 3.
        if id(ob) in ignore_dict:
            return 'ignored'
        # Force ghost objects to load so their state gets pickled.
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
Example #18
0
File: core.py Project: solebox/WorQ
    def serialize(self, obj, deferred=False):
        """Serialize an object

        :param obj: The object to serialize.
        :param deferred: When this is true Deferred objects are
            serialized and their values are loaded on deserialization.
            When this is false Deferred objects are not serializable.
        """
        if deferred:
            args = {}

            def persistent_id(obj):
                # Replace each Deferred with its id and remember the
                # object so it can be re-bound after deserialization.
                if isinstance(obj, Deferred):
                    args[obj.id] = obj
                    return obj.id
                return None
        else:
            args = None

            def persistent_id(obj):
                # Deferred objects are forbidden in this mode.
                if isinstance(obj, Deferred):
                    raise PicklingError('%s cannot be serialized' % obj)
                return None

        data = StringIO()
        pickle = Pickler(data, HIGHEST_PROTOCOL)
        # persistent_id hooks into pickling to intercept Deferreds.
        pickle.persistent_id = persistent_id
        pickle.dump(obj)
        msg = data.getvalue()
        # In deferred mode the caller also gets the id -> Deferred map.
        return (msg, args) if deferred else msg
Example #19
0
    def replicate(self, target):
        """Push this transaction's state to the replication *target*.

        Pickles a dict of the transaction fields, compresses it, and
        POSTs it to ``<target.url>/load`` with basic-auth credentials
        embedded in the URL.  An HTTP 204 response is the expected
        success; any other HTTPError is re-raised.
        """
        d = {'id': self.id,
             'user': self.user, 
             'description': self.description,
             'entries': self._entries}
        f = StringIO()
        p = Pickler(f)
        p.dump(d)

        payloadStr = f.getvalue()
        LOG('Replication', INFO, 'replicate> transaction id: %s; '
                'size (uncompressed): %s' % (
                    oid2str(self.id), len(payloadStr))) #DBG
        payloadStr = compress(payloadStr)

        # NOTE(review): FixedHTTPHandler presumably works around a
        # urllib2 quirk -- confirm against its definition.
        handler = FixedHTTPHandler()
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)

        LOG('Replication', INFO, 'replicate> transaction id: %s; size: %s' % (
            oid2str(self.id), len(payloadStr))) #DBG
        url = '%s/load' % target.url
        schema, domain, path, x1, x2, x3 = urlparse.urlparse(url)
        # Re-assemble as scheme://user:password@host/path for basic auth.
        newurl = '%s://%s:%s@%s%s' % (
            schema, target.username, target.password, domain, path)
        try:
            urllib2.urlopen(newurl, urllib.urlencode({'data': payloadStr}))
        except urllib2.HTTPError, e:
            if e.code != 204: # 204 == 'No content' which is what we expect
                raise
Example #20
0
    def validate(self):
        """Validate this test's CSL against its RNC schema using jing.

        Writes the CSL to a temp file, runs the configured jing command,
        echoes any validation errors, and on failure prints a numbered
        CSL listing, pickles resume state to ``self.pickle`` and exits.
        """
        if self.opt.verbose:
            print(self.testname)
        if not os.path.exists(self.cp.get("jing", "path")):
            print("Error: jing not found.")
            print("  Looked in: %s" % self.cp.get("jing", "path"))
            sys.exit()
        # Extract the CSL version to pick the matching schema file.
        m = re.match("(?sm).*version=\"([.0-9a-z]+)\".*", self.data["csl"])
        if m:
            rnc_path = os.path.join(self.cp.get("csl", "v%s" % m.group(1)))
        else:
            print("Error: Unable to find CSL version in %s" % self.hp)
            sys.exit()
        # jing validates files on disk, so materialize the CSL first.
        tfd, tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd, self.data["csl"].encode('utf8'))
        os.close(tfd)

        jfh = os.popen("%s %s -c %s %s" %
                       (self.cp.get("jing", "command"),
                        self.cp.get("jing", "path"), rnc_path, tfilename))
        success = True
        plural = ""
        # Scan jing's output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:", line)
            if e:
                print(line)
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)", line)
            if m:
                if success:
                    # First error for this test: print the banner once.
                    print("\n##")
                    print("#### Error%s in CSL for test: %s" %
                          (plural, self.hp))
                    print("##\n")
                    success = False
                print("  %s @ line %s" % (m.group(3).upper(), m.group(1)))
                plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print("")
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print("%3d  %s" % (linepos, cslline))
                linepos += 1
            pfh = open(self.pickle, "wb+")
            pickler = Pickler(pfh)

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump((opt, self.pos))
            sys.exit()
Example #21
0
 def pickle_the_memory(self):
     """Pickle ``self.memory`` to the cecLog memory file.

     Opens in binary mode ('wb', as pickle data requires) inside a
     ``with`` block so the file is closed even if the dump raises (the
     old code leaked the handle on error).
     """
     target = join(
         self.p.datapath, self.ea.ownname + '_cecLog_memory_' +
         self.p.label[:-5] + '.txt')
     with open(target, 'wb') as outfile:
         einmachglas = Pickler(outfile)
         einmachglas.dump(self.memory)
Example #22
0
 def compress_item ( self , value ) :
     """Compress (zip) the item using ``bz2.compress''
     - see bz2.compress
     """
     buf = BytesIO()
     Pickler(buf, self.protocol).dump(value)
     return bz2.compress(buf.getvalue(), self.compresslevel)
Example #23
0
    def validate(self):
        """Validate this test's CSL with the jing validator (Python 2).

        Writes the CSL to a temp file, runs jing from the sibling
        directory, echoes any errors, and on failure prints a numbered
        CSL listing, pickles resume state and exits the process.
        """
        if self.opt.verbose:
            print self.testname
        if not os.path.exists(os.path.join("..", "jing")):
            print "Error: jing not found as sibling of processor archive."
            print "  Looked in: %s" % os.path.join("..", "jing")
            sys.exit()
        # Extract the CSL version to locate the matching schema.
        m = re.match("(?sm).*version=\"([.0-9a-z]+)\".*", self.data["csl"])
        if m:
            rnc_path = os.path.join("csl", "%s" % m.group(1), "csl.rnc")
        else:
            print "Error: Unable to find CSL version in %s" % self.hp
            sys.exit()
        # jing reads files from disk, so materialize the CSL first.
        tfd, tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd, self.data["csl"])
        os.close(tfd)

        jfh = os.popen("java -jar %s -c %s %s" % (os.path.join(
            "..", "jing", "bin", "jing.jar"), rnc_path, tfilename))
        success = True
        plural = ""
        # Scan jing's output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:", line)
            if e:
                print line
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)", line)
            if m:
                if success:
                    # First error for this test: print the banner once.
                    print "\n##"
                    print "#### Error%s in CSL for test: %s" % (plural,
                                                                self.hp)
                    print "##\n"
                    success = False
                print "  %s @ line %s" % (m.group(3).upper(), m.group(1))
                plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print ""
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print "%3d  %s" % (linepos, cslline)
                linepos += 1
            pfh = open(self.pickle, "w+b")
            pickler = Pickler(pfh)

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump((opt, self.pos))
            sys.exit()
Example #24
0
    def __setitem__(self, key, value):
        """Cache *value* locally, then persist its pickle to the redis hash."""
        with self._cache_write_lock:
            self._cache[key] = value

        buf = StringIO()
        Pickler(buf, self._protocol).dump(value)

        self._storage.redis.hset(self._hash_key, key, buf.getvalue())
Example #25
0
def _zip_setitem(self, key, value):
    """``set-and-compress-item'' to dbase 
    """
    if self.writeback:
        self.cache[key] = value
    f = StringIO()
    p = Pickler(f, self._protocol)
    p.dump(value)
    self.dict[key] = zlib.compress(f.getvalue(), self.compresslevel)
Example #26
0
    def __setitem__(self, key, value):
        """Store *value* in the write cache and mirror it (pickled) to redis."""
        with self._cache_write_lock:
            self._cache[key] = value

        stream = StringIO()
        pickler = Pickler(stream, self._protocol)
        pickler.dump(value)
        payload = stream.getvalue()

        self._storage.redis.hset(self._hash_key, key, payload)
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism.
    """
    buf = StringIO()
    Pickler(buf, 1).dump(aq_base(obj))
    buf.seek(0)
    return Unpickler(buf).load()
Example #28
0
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism.
    """
    stream = StringIO()
    writer = Pickler(stream, 1)
    # aq_base presumably strips the Zope acquisition wrapper first.
    writer.dump(aq_base(obj))
    stream.seek(0)
    reader = Unpickler(stream)
    return reader.load()
Example #29
0
    def testPickleUnpickle(self):
        """Round-trip the Allow singleton through pickle; identity must survive."""
        stream = StringIO()
        Pickler(stream).dump(Allow)
        stream.seek(0)
        newAllow = Unpickler(stream).load()

        self.failUnless(newAllow is Allow)
Example #30
0
    def validate(self):
        """Validate this test's CSL against its RNC schema using jing.

        Writes the CSL to a temp file, runs the configured jing command,
        echoes any validation errors, and on failure prints a numbered
        CSL listing, pickles resume state to ``self.pickle`` and exits.
        """
        if self.opt.verbose:
            print(self.testname)
        if not os.path.exists(self.cp.get("jing", "path")):
            print("Error: jing not found.")
            print("  Looked in: %s" % self.cp.get("jing", "path"))
            sys.exit()
        # Extract the CSL version to pick the matching schema file.
        m = re.match("(?sm).*version=\"([.0-9a-z]+)\".*",self.data["csl"])
        if m:
            rnc_path = os.path.join(self.cp.get("csl", "v%s" % m.group(1)))
        else:
            print("Error: Unable to find CSL version in %s" % self.hp)
            sys.exit()
        # jing validates files on disk, so materialize the CSL first.
        tfd,tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd,self.data["csl"].encode('utf8'))
        os.close(tfd)
        
        jfh = os.popen("%s %s -c %s %s" % (self.cp.get("jing", "command"), self.cp.get("jing", "path"),rnc_path,tfilename))
        success = True
        plural = ""
        # Scan jing's output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:",line)
            if e:
                print(line)
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)",line)
            if m:
              if success:
                  # First error for this test: print the banner once.
                  print("\n##")
                  print("#### Error%s in CSL for test: %s" % (plural,self.hp))
                  print("##\n")
                  success = False
              print("  %s @ line %s" %(m.group(3).upper(),m.group(1)))
              plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print("")
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print("%3d  %s" % (linepos,cslline))
                linepos += 1
            pfh = open( self.pickle,"wb+")
            pickler = Pickler( pfh )

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump( (opt, self.pos) )
            sys.exit()
Example #31
0
    def validate(self):
        """Validate this test's CSL with the jing validator (Python 2).

        Writes the CSL to a temp file, runs jing from the sibling
        directory, echoes any errors, and on failure prints a numbered
        CSL listing, pickles resume state and exits the process.
        """
        if self.opt.verbose:
            print self.testname
        if not os.path.exists(os.path.join("..","jing")):
            print "Error: jing not found as sibling of processor archive."
            print "  Looked in: %s" % os.path.join("..","jing")
            sys.exit()
        # Extract the CSL version to locate the matching schema.
        m = re.match("(?sm).*version=\"([.0-9a-z]+)\".*",self.data["csl"])
        if m:
            rnc_path = os.path.join("csl","%s" % m.group(1), "csl.rnc")
        else:
            print "Error: Unable to find CSL version in %s" % self.hp
            sys.exit()
        # jing reads files from disk, so materialize the CSL first.
        tfd,tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd,self.data["csl"])
        os.close(tfd)
        
        jfh = os.popen("java -jar %s -c %s %s" % (os.path.join("..","jing","bin","jing.jar"),rnc_path,tfilename))
        success = True
        plural = ""
        # Scan jing's output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:",line)
            if e:
                print line
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)",line)
            if m:
              if success:
                  # First error for this test: print the banner once.
                  print "\n##"
                  print "#### Error%s in CSL for test: %s" % (plural,self.hp)
                  print "##\n"
                  success = False
              print "  %s @ line %s" %(m.group(3).upper(),m.group(1))
              plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print ""
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print "%3d  %s" % (linepos,cslline)
                linepos += 1
            pfh = open( self.pickle,"w+b")
            pickler = Pickler( pfh )

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump( (opt, self.pos) )
            sys.exit()
Example #32
0
def pickle(obj, filename, protocol=0):
    """Pickle *obj* to *filename* using the given pickle *protocol*.

    ``with`` replaces the manual open/close/finally dance and still
    guarantees the handle is closed if the dump raises.
    """
    with open(filename, "wb") as f:
        p = Pickler(f, protocol)
        p.dump(obj)
Example #33
0
 def test_config_and_collector_pickling(self):
     """Round-trip a py.test config and its collectors through cPickle
     and check that identity, parenting and paths survive after the
     config is re-initialised against a fresh topdir.
     """
     from cPickle import Pickler, Unpickler
     dir1 = self.tmpdir.ensure("somedir", dir=1)
     config = py.test.config._reparse([self.tmpdir])
     col = config.getfsnode(config.topdir)
     col1 = col.join(dir1.basename)
     assert col1.parent is col 
     io = py.std.cStringIO.StringIO()
     pickler = Pickler(io)
     # ``col`` is dumped twice: pickler memoization must make both
     # loads resolve to the same object (checked as newcol3 below).
     pickler.dump(config)
     pickler.dump(col)
     pickler.dump(col1)
     pickler.dump(col)
     io.seek(0) 
     unpickler = Unpickler(io)
     newconfig = unpickler.load()
     topdir = self.tmpdir.ensure("newtopdir", dir=1)
     # The config must be re-initialised before collectors are loaded.
     newconfig._initafterpickle(topdir)
     topdir.ensure("somedir", dir=1)
     newcol = unpickler.load()
     newcol2 = unpickler.load()
     newcol3 = unpickler.load()
     assert newcol2._config is newconfig 
     assert newcol2.parent == newcol 
     assert newcol._config is newconfig
     assert newconfig.topdir == topdir
     assert newcol3 is newcol
     assert newcol.fspath == topdir 
     assert newcol2.fspath.basename == dir1.basename
     assert newcol2.fspath.relto(topdir)
Example #34
0
    def validate(self, validator_path, csl_schema_path, cslm_schema_path):
        """Validate this test's CSL with an external validator command.

        Chooses the CSL-M schema when the style declares version
        "1.1mlz1", otherwise the plain CSL schema.  On failure prints a
        numbered CSL listing, pickles resume state and exits; on
        success prints a progress dot.
        """
        if self.opt.verbose:
            print self.testname
        # CSL-M styles declare version 1.1mlz1 and need their own schema.
        m = re.match("(?sm).*version=\"1.1mlz1\".*", self.data["csl"])
        if m:
            rnc_path = cslm_schema_path
        else:
            rnc_path = csl_schema_path
        # The validator reads files on disk, so materialize the CSL.
        tfd, tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd, self.data["csl"])
        os.close(tfd)
        jfh = os.popen("%s %s %s" % (validator_path, rnc_path, tfilename))

        success = True
        plural = ""
        # Scan validator output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:", line)
            if e:
                print line
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)", line)
            if m:
                if success:
                    # First error for this test: print the banner once.
                    print "\n##"
                    print "#### Error%s in CSL for test: %s" % (plural,
                                                                self.hp)
                    print "##\n"
                    success = False
                print "  %s @ line %s" % (m.group(3).upper(), m.group(1))
                plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print ""
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print "%3d  %s" % (linepos, cslline)
                linepos += 1
            pfh = open(self.pickle, "w+b")
            pickler = Pickler(pfh)

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump((opt, self.pos))
            sys.exit()
        sys.stdout.write(".")
        sys.stdout.flush()
Example #35
0
def pickle(obj, filename, binmode=0):
    """Pickle *obj* to *filename*; *binmode* is passed through as the
    pickle protocol (0 = ASCII, 1 = binary).

    ``with`` subsumes the manual try/finally close dance and still
    closes the file if the dump raises.
    """
    with open(filename, "wb") as f:
        p = Pickler(f, binmode)
        p.dump(obj)
Example #36
0
def fast_encode():
    # Only use in cases where you *know* the data contains only basic
    # Python objects
    # NOTE(review): ``Pickler(1)`` with no file argument is a
    # cPickle-specific calling convention -- this does not work with the
    # pure-Python pickle module; confirm cPickle is the Pickler in use.
    pickler = Pickler(1)
    # ``fast = 1`` disables memoization; only safe for acyclic data.
    pickler.fast = 1
    dump = pickler.dump

    def fast_encode(*args):
        # Presumably cPickle's dump(obj, 1) returns the pickled string
        # when constructed without a file -- verify against cPickle docs.
        return dump(args, 1)

    return fast_encode
Example #37
0
def _zip_setitem(self, key, value):
    """ ``set-and-compress-item'' to dbase 
    """
    ADD_ITEM = 'REPLACE INTO %s (key, value) VALUES (?,?)' % self.tablename

    f = BytesIO()
    p = Pickler(f, self.protocol)
    p.dump(value)
    blob = f.getvalue()
    zblob = zlib.compress(blob, self.compression)
    self.conn.execute(ADD_ITEM, (key, sqlite3.Binary(zblob)))
Example #38
0
def set_action_param_var(list_of_dict_files, **kwargs):
    '''
    Argument is dictionary to be pickled.  Return value is name of file.
    '''
    from cPickle import Pickler
    import tempfile
    # delete=False: the file must outlive this call; the caller is
    # responsible for removing it.  Extra kwargs are forwarded to
    # NamedTemporaryFile.  (A stray unreachable ''' literal that
    # followed the return has been removed.)
    with tempfile.NamedTemporaryFile(dir='/var/spool/ion', delete=False,
                                     mode='w+b', **kwargs) as fileh:
        pickle = Pickler(fileh)
        pickle.dump(list_of_dict_files)
    return fileh.name
Example #39
0
def pickle(obj, filename, protocol=0):
    """Pickle *obj* to *filename* with the given pickle *protocol*.

    The context manager guarantees the file is closed on success or
    error, replacing the explicit try/finally; the commented-out debug
    print was dropped.
    """
    with open(filename, "wb") as f:
        p = Pickler(f, protocol)
        p.dump(obj)
Example #40
0
def saveSVM(pickle_filename, svm, kernel):
    """Pickles a Shogun SVM object to a file by saving its settings"""
    from cPickle import Pickler
    # ``with`` guarantees the file is closed even if one of the SVM
    # accessors below raises (the old code leaked the handle on error).
    with open(pickle_filename, 'wb') as pickle_file:
        pck = Pickler(pickle_file)
        pck.dump((__version__,
                  svm.get_num_support_vectors(),
                  kernel.get_name(),
                  svm.get_bias(),
                  svm.get_alphas(),
                  svm.get_support_vectors()))
Example #41
0
    def validate(self, validator_path, csl_schema_path, cslm_schema_path):
        """Validate this test's CSL with an external validator command.

        Chooses the CSL-M schema when the style declares version
        "1.1mlz1", otherwise the plain CSL schema.  On failure prints a
        numbered CSL listing, pickles resume state and exits; on
        success prints a progress dot.
        """
        if self.opt.verbose:
            print self.testname
        # CSL-M styles declare version 1.1mlz1 and need their own schema.
        m = re.match("(?sm).*version=\"1.1mlz1\".*",self.data["csl"])
        if m:
            rnc_path = cslm_schema_path
        else:
            rnc_path = csl_schema_path
        # The validator reads files on disk, so materialize the CSL.
        tfd,tfilename = tempfile.mkstemp(dir=".")
        os.write(tfd,self.data["csl"])
        os.close(tfd)
        jfh = os.popen("%s %s %s" % (validator_path,rnc_path,tfilename))

        success = True
        plural = ""
        # Scan validator output for "fatal:" lines and
        # "<file>:<line>:<col>: error:" diagnostics.
        while 1:
            line = jfh.readline()
            if not line: break
            line = line.strip()
            e = re.match("^fatal:",line)
            if e:
                print line
                sys.exit()
            m = re.match(".*:([0-9]+):([0-9]+):  *error:(.*)",line)
            if m:
              if success:
                  # First error for this test: print the banner once.
                  print "\n##"
                  print "#### Error%s in CSL for test: %s" % (plural,self.hp)
                  print "##\n"
                  success = False
              print "  %s @ line %s" %(m.group(3).upper(),m.group(1))
              plural = "s"
        jfh.close()
        os.unlink(tfilename)
        if not success:
            # Dump a numbered CSL listing so the errors above can be
            # located, save resume state, and abort the run.
            print ""
            io = StringIO()
            io.write(self.data["csl"])
            io.seek(0)
            linepos = 1
            while 1:
                cslline = io.readline()
                if not cslline: break
                cslline = cslline.rstrip()
                print "%3d  %s" % (linepos,cslline)
                linepos += 1
            pfh = open( self.pickle,"w+b")
            pickler = Pickler( pfh )

            # NOTE(review): ``opt`` is a module-level name here, not
            # ``self.opt`` -- confirm this is intended.
            pickler.dump( (opt, self.pos) )
            sys.exit()
        sys.stdout.write(".")
        sys.stdout.flush()
Example #42
0
 def dump_pickle(self, archive):
     """Pickle *archive* to the configured path, logging any failure."""
     logging.info("Pickling latest results to {0}.".format(self.pickle_path))
     pickle_fp = self._get_pickle_file_pointer('wb')
     try:
         Pickler(pickle_fp, protocol=2).dump(archive)
     except PickleError as err:
         logging.critical("Pickling failure.  Error: {}".format(repr(err)))
     finally:
         # Always release the file handle, even on failure.
         pickle_fp.close()
     self.pickle_exists = True
     logging.info("Pickling complete to {0}".format(self.pickle_path))
     return ()
Example #43
0
    def sync(self):
        """Flush every cached entry to redis in one pipeline, then clear."""
        if not self._cache:
            return

        with self._cache_write_lock, self._storage.redis.pipeline() as pipeline:
            for key, entry in self._cache.items():
                buf = StringIO()
                Pickler(buf, self._protocol).dump(entry)
                pipeline.hset(self._hash_key, key, buf.getvalue())

            pipeline.execute()
            self._cache.clear()
Example #44
0
def set_action_param_file(list_of_dict_files):
    '''
    Argument is dictionary to be pickled.  Return value is name of file.
    '''
    try:
        from cPickle import Pickler  # Python 2 (fast C implementation)
    except ImportError:
        from pickle import Pickler  # Python 3 (cPickle was merged in)
    import tempfile
    action = list_of_dict_files[0].get('action', 'unk')
    # delete=False: the caller consumes (and removes) the file later;
    # ``with`` closes it even if the dump raises.  The commented-out
    # re-open line was dead code and has been removed.
    with tempfile.NamedTemporaryFile(dir='/tmp', delete=False, mode='w+b',
                                     prefix=action) as fileh:
        Pickler(fileh).dump(list_of_dict_files)
    return fileh.name
Example #45
0
 def test_config_and_collector_pickling_missing_initafter(self):
     """Loading a pickled collector without first calling
     ``_initafterpickle`` on its config must raise ValueError.
     """
     from cPickle import Pickler, Unpickler
     config = py.test.config._reparse([self.tmpdir])
     col = config.getfsnode(config.topdir)
     io = py.std.cStringIO.StringIO()
     pickler = Pickler(io)
     pickler.dump(config)
     pickler.dump(col)
     io.seek(0) 
     unpickler = Unpickler(io)
     newconfig = unpickler.load()
     # we don't call _initafterpickle ... so
     py.test.raises(ValueError, "unpickler.load()")
Example #46
0
    def sync(self):
        """Write all cached entries to redis in one pipeline and clear
        the cache.

        Uses ``items()`` for consistency with the sibling
        implementation; ``iteritems()`` is Python-2 only, and on
        Python 2 ``items()`` behaves the same here apart from building
        an intermediate list.
        """
        if not self._cache:
            return

        with self._cache_write_lock, self._storage.redis.pipeline() as pipeline:
            for key, entry in self._cache.items():
                f = StringIO()
                p = Pickler(f, self._protocol)
                p.dump(entry)
                pipeline.hset(self._hash_key, key, f.getvalue())

            pipeline.execute()
            self._cache.clear()
def getSize(obj):
    """Calculate the size of *obj* as cheaply as possible.

    Strategies are tried in increasing order of cost:

    1. empty object (``len(obj) == 0``) -> 0
    2. ``IStreamableReference`` -> the object's own ``getSize()``
    3. plain string -> ``len(obj)``
    4. file-like object -> seek to the end and ``tell()``
    5. fallback -> length of the pickled representation

    Returns an int size, or None when every strategy fails.
    """
    # Each probe is individually guarded so a failure simply falls through
    # to the next strategy.  ``except Exception`` replaces the original
    # bare ``except:`` so SystemExit/KeyboardInterrupt still propagate.
    try:
        # Zero-length objects are free to measure.
        if len(obj) == 0:
            return 0
    except Exception:
        pass

    try:
        # Objects that know their own size via IStreamableReference.
        if IStreamableReference.providedBy(obj):
            size = obj.getSize()
            if size is not None:
                return size
    except Exception:
        pass

    try:
        # Strings: size is simply their length.
        if isinstance(obj, types.StringTypes):
            return len(obj)
    except Exception:
        pass

    try:
        # File-like object: measure by seeking to the end, then restore
        # the original position so the caller sees no side effect.
        methods = dir(obj)
        if "seek" in methods and "tell" in methods:
            currentPos = obj.tell()
            obj.seek(0, 2)
            size = obj.tell()
            obj.seek(currentPos)
            return size
    except Exception:
        pass

    try:
        # Last resort: pickle the object and use the stream length.
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(obj)
        size = stream.tell()
    except Exception:
        size = None

    return size
def storeToFile(stuff, filename, verbose=True):
    ''' Store one item (e.g. state list or networkx graph) to file.

    The path is case-normalized and the parent directory is created if
    missing.  *stuff* is pickled with protocol 2.  Returns the
    normalized filename.
    '''
    # Normalize the path and make sure the target directory exists.
    filename = os.path.normcase(filename)
    directory = os.path.dirname(filename)
    createIfNotExist(directory)

    # ``with`` guarantees the handle is closed even if dump() raises
    # (the original leaked the file object on a pickling error).
    with open(filename, 'wb') as f:
        Pickler(f, protocol=2).dump(stuff)
    if verbose:
        total = len(stuff)
        # Single-argument parenthesized form: prints identically under
        # Python 2's print statement and is valid under Python 3.
        print("Written %i items to pickled binary file: %s" % (total, filename))
    return filename
Example #49
0
def set_action_param_var(list_of_dict_files, **kwargs):
    '''
    Argument is dictionary to be pickled.  Return value is name of file.
    '''
    from cPickle import Pickler
    import tempfile
    # Persist the payload to a non-deleting temp file under /var/spool/ion;
    # extra NamedTemporaryFile options (e.g. prefix) pass through kwargs.
    # The caller is responsible for removing the file later.
    with tempfile.NamedTemporaryFile(dir='/var/spool/ion',
                                     delete=False,
                                     mode='w+b',
                                     **kwargs) as tmp:
        Pickler(tmp).dump(list_of_dict_files)
    return tmp.name
    '''
Example #50
0
    def save(self):
        '''Save object into DB.

        Persists the request/response pair twice: the metadata row goes
        into the SQL table ``self._dataTable`` (INSERT on first save,
        UPDATE afterwards), and the raw (request, response) tuple is
        pickled to a per-response file in the session directory.
        Returns True after the raw data has been written.
        '''
        resp = self.response
        # Column values collected in the exact order expected by both the
        # INSERT and the UPDATE statements below — do not reorder.
        values = []
        values.append(resp.getId())
        values.append(self.request.getURI().url_string)
        values.append(resp.getCode())
        values.append(self.tag)
        values.append(int(self.mark))
        values.append(str(resp.info()))
        values.append(resp.getWaitTime())
        values.append(resp.getMsg())
        values.append(resp.content_type)
        ch = resp.charset
        values.append(ch)
        values.append(self.request.getMethod())
        values.append(len(resp.body))
        # Status-code family, e.g. 404 -> 4.  NOTE(review): relies on
        # Python 2 integer floor division — confirm if this is ported.
        code = int(resp.getCode()) / 100
        values.append(code)
        values.append(resp.getAlias())
        values.append(int(self.request.getURI().hasQueryString()))

        if not self.id:
            # First save: INSERT a new row, then adopt the response id.
            sql = ('INSERT INTO %s '
            '(id, url, code, tag, mark, info, time, msg, content_type, '
                    'charset, method, response_size, codef, alias, has_qs) '
            'VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)' % self._dataTable)
            self._db.execute(sql, values)
            self.id = self.response.getId()
        else:
            # Row already exists: UPDATE it, keyed on the trailing id
            # parameter appended here.
            values.append(self.id)
            sql = ('UPDATE %s' 
            ' SET id = ?, url = ?, code = ?, tag = ?, mark = ?, info = ?, '
                        'time = ?, msg = ?, content_type = ?, charset = ?, '
            'method = ?, response_size = ?, codef = ?, alias = ?, has_qs = ? '
            ' WHERE id = ?' % self._dataTable)
            self._db.execute(sql, values)
        
        # 
        # Save raw data to file
        #
        fname = os.path.join(self._sessionDir, str(self.response.id) + self._ext)

        # Guard the pickle file against concurrent writers.
        with FileLock(fname, timeout=1):
        
            rrfile = open(fname, 'wb')
            p = Pickler(rrfile)
            p.dump((self.request, self.response))
            rrfile.close()
            return True
Example #51
0
 def test_config_and_collector_pickling(self, testdir):
     """Round-trip a collector hierarchy (parent + child) through a pickle
     stream and reload it from a *different* working directory, checking
     that paths are rebound to the new topdir rather than stored absolute."""
     from cPickle import Pickler, Unpickler
     tmpdir = testdir.tmpdir
     dir1 = tmpdir.ensure("somedir", dir=1)
     config = testdir.parseconfig()
     col = config.getfsnode(config.topdir)
     col1 = col.join(dir1.basename)
     assert col1.parent is col
     io = py.std.cStringIO.StringIO()
     pickler = Pickler(io)
     # Dump parent, child, then parent again: the third dump becomes a
     # pickle back-reference, so after loading all three objects the two
     # parent loads must be the same object sharing one config.
     pickler.dump(col)
     pickler.dump(col1)
     pickler.dump(col)
     io.seek(0)
     unpickler = Unpickler(io)
     # Recreate an equivalent layout under a fresh topdir and chdir into
     # it before loading, so unpickling resolves against the new cwd.
     topdir = tmpdir.ensure("newtopdir", dir=1)
     topdir.ensure("somedir", dir=1)
     old = topdir.chdir()
     try:
         newcol = unpickler.load()
         newcol2 = unpickler.load()
         newcol3 = unpickler.load()
         assert newcol2.config is newcol.config
         assert newcol2.parent == newcol
         assert newcol2.config.topdir.realpath() == topdir.realpath()
         assert newcol.fspath.realpath() == topdir.realpath()
         assert newcol2.fspath.basename == dir1.basename
         assert newcol2.fspath.relto(newcol2.config.topdir)
     finally:
         # Always restore the previous working directory.
         old.chdir()
Example #52
0
    def save_replay(self, data, replay_file):
        """Serialize the lattice run to *replay_file* as a pickled dict.

        The dict carries the grid dimensions scaled to lattice cells
        (width/height divided by resolution), the resolution, the run
        length (``self.age``) and the caller-supplied *data* payload.
        """
        replay = {}
        # Store dimensions in lattice cells, not raw pixels.
        replay["width"] = self.width / self.resolution
        replay["height"] = self.height / self.resolution
        replay["resolution"] = self.resolution
        replay["length"] = self.age
        replay["data"] = data

        # Pickle streams are binary: open with "wb".  The original text
        # mode "w" corrupts the stream on Windows and fails on Python 3.
        with open(replay_file, "wb") as fp:
            pkl = Pickler(fp)
            pkl.dump(replay)

            log = logging.getLogger("LATTICE")
            log.debug("Lattice run saved into replay")
Example #53
0
 def test_collector_implicit_config_pickling(self, testdir):
     """A collector pickled alone must carry its config implicitly."""
     from cPickle import Pickler, Unpickler
     # Build a minimal project with one test module.
     tmpdir = testdir.tmpdir
     testdir.chdir()
     testdir.makepyfile(hello="def test_x(): pass")
     config = testdir.parseconfig(tmpdir)
     col = config.getfsnode(config.topdir)
     # Round-trip the top collector through an in-memory stream.
     stream = py.std.cStringIO.StringIO()
     Pickler(stream).dump(col)
     stream.seek(0)
     col2 = Unpickler(stream).load()
     # The restored collector must match the original by name and path list.
     assert col2.name == col.name
     assert col2.listnames() == col.listnames()
Example #54
0
def set_action_param_var(list_of_dict_files):
    '''
    Argument is dictionary to be pickled.  Return value is name of file.
    '''
    from cPickle import Pickler
    import tempfile
    # Format name to include pk of dmfilestat object - in case we lose the data file itself
    # /var/spool/ion/<action>_<pk>_<randomstring>
    store_dir = '/var/spool/ion'
    first = list_of_dict_files[0]
    prefix = "%s_%d_" % (first['action'], first['pk'])
    with tempfile.NamedTemporaryFile(dir=store_dir, delete=False, mode='w+b', prefix=prefix) as out:
        Pickler(out).dump(list_of_dict_files)
    return out.name
    '''
Example #55
0
def set_action_param_var(list_of_dict_files):
    '''
    Argument is dictionary to be pickled.  Return value is name of file.
    '''
    from cPickle import Pickler
    import tempfile
    # The temp file name encodes the dmfilestat action and pk so the
    # payload stays identifiable even if the data file itself is lost:
    #   /var/spool/ion/<action>_<pk>_<randomstring>
    store_dir = '/var/spool/ion'
    prefix = "%s_%d_" % (list_of_dict_files[0]['action'],
                         list_of_dict_files[0]['pk'])
    fileh = tempfile.NamedTemporaryFile(dir=store_dir, delete=False,
                                        mode='w+b', prefix=prefix)
    with fileh:
        pickler = Pickler(fileh)
        pickler.dump(list_of_dict_files)
    return fileh.name
    '''