def test_pickling(self):
        "Pickling"

        def checked_pickle(c):
            p = loads(dumps(c))
            assert_similar_contexts(c, p)
            return p

        c1, c2, c3 = self.factory(), self.factory(), self.factory()
        c3.update(a=1, x=arange(3))
        c2.update(a=2, x=-arange(3), c3=c3)
        c1.update(a=3, x=1 + arange(3), c2=c2)
        c1 = checked_pickle(c1)
        c1.c2 = checked_pickle(c1.c2)
        c1.c2.c3 = checked_pickle(c1.c2.c3)
        c1.c2 = checked_pickle(c1.c2)
        c1 = checked_pickle(c1)

        # Pickle a context with numpy ufuncs: ufuncs don't pickle, so we throw
        # them out of the pickled state
        c = self.factory()
        fs = "all, arange, array, cos, sin"
        exec "from numpy import %s" % fs in {}, c
        loads(dumps(c))
        for f in fs.split(", "):
            assert f in c
Example #2
    def test_versionUpgrade(self):
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            persistenceForgets = ['garbagedata']
            v3 = 0
            v4 = 0

            def __init__(self):
                self.somedata = 'xxx'
                self.garbagedata = lambda q: 'cant persist'

            def upgradeToVersion3(self):
                self.v3 += 1

            def upgradeToVersion4(self):
                self.v4 += 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = pickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        pickl = pickle.dumps(obj)
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"
Example #3
def import_from_queue(progress, conn, task_queue, error_queue, replace_conflicts, durability, write_count):
    if progress[0] is not None and not replace_conflicts:
        # We were interrupted and it's not ok to overwrite rows; check that the batch either:
        # a) does not exist on the server
        # b) is exactly the same on the server
        task = progress[0]
        pkey = r.db(task[0]).table(task[1]).info().run(conn)["primary_key"]
        for i in reversed(range(len(task[2]))):
            obj = pickle.loads(task[2][i])
            if pkey not in obj:
                raise RuntimeError("Connection error while importing.  Current row has no specified primary key, so cannot guarantee absence of duplicates")
            row = r.db(task[0]).table(task[1]).get(obj[pkey]).run(conn)
            if row == obj:
                write_count[0] += 1
                del task[2][i]
            else:
                raise RuntimeError("Duplicate primary key `%s`:\n%s\n%s" % (pkey, str(obj), str(row)))

    task = task_queue.get() if progress[0] is None else progress[0]
    while not isinstance(task, StopIteration):
        try:
            # Unpickle objects (TODO: super inefficient, would be nice if we could pass down json)
            objs = [pickle.loads(obj) for obj in task[2]]
            conflict_action = 'replace' if replace_conflicts else 'error'
            res = r.db(task[0]).table(task[1]).insert(objs, durability=durability, conflict=conflict_action).run(conn)
        except:
            progress[0] = task
            raise

        if res["errors"] > 0:
            raise RuntimeError("Error when importing into table '%s.%s': %s" %
                               (task[0], task[1], res["first_error"]))

        write_count[0] += len(objs)
        task = task_queue.get()
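The loop above treats a StopIteration instance as a shutdown sentinel. The producer side is not shown; presumably it ends the queue with something like the following sketch, where num_workers is a hypothetical count of consumer processes:

# hypothetical producer-side shutdown: one sentinel per consumer
for _ in range(num_workers):
    task_queue.put(StopIteration())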
Example #4
    def get_traceback_context(self, request, obj):
        """
        Create a technical server error context from a stored exception
        record; the unpickled values mirror those returned by sys.exc_info().
        """
        try:
            module, args, frames = pickle.loads(base64.b64decode(obj.data['exc']).decode('zlib'))
        except:
            module, args, frames = pickle.loads(base64.b64decode(obj.data['exc']))
        obj.class_name = str(obj.class_name)

        # We fake the exception class due to many issues with imports/builtins/etc
        exc_type = type(obj.class_name, (Exception,), {})
        exc_value = exc_type(obj.message)

        exc_value.args = args
        
        fake_request = FakeRequest()
        fake_request.META = obj.data.get('META', {})
        fake_request.GET = obj.data.get('GET', {})
        fake_request.POST = obj.data.get('POST', {})
        fake_request.FILES = obj.data.get('FILES', {})
        fake_request.COOKIES = obj.data.get('COOKIES', {})
        fake_request.url = obj.url
        if obj.url:
            fake_request.path_info = '/' + obj.url.split('/', 3)[-1]
        else:
            fake_request.path_info = ''

        reporter = ImprovedExceptionReporter(fake_request, exc_type, exc_value, frames)
        html = reporter.get_traceback_html()
        
        return {
            'error_body': mark_safe(html),
        }
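The try/except above implies two stored formats for obj.data['exc']. A hedged sketch of the write side (hypothetical, using Python 2's zlib codec; older records apparently skipped the compression step):

import base64
import pickle

# hypothetical write side mirroring the loads() calls above
payload = pickle.dumps((module, args, frames))
obj.data['exc'] = base64.b64encode(payload.encode('zlib'))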
Example #5
 def load(self):
     try:
         if self.clf is not None:
             return
         
         user_model = UserModel.objects(user_id = self.user.id).first()
         
         if user_model is None:
             logger.debug("UserModel for user %s is empty." % self.user.id)
             self.clf = None
             return
         
         # ensure right version
         if user_model.version != self.get_version():
             logger.debug("UserModel for user %s has wrong version." %
                          self.user.id)
             self.clf = None
             return
         
         # unpickle classifier; it was saved as a utf-8 string.
         # get the str object back by encoding it.
         pickled_classifier = user_model.data.clf.encode('utf-8')
         pickled_theta = user_model.data.theta.encode('utf-8')
         pickled_sigma = user_model.data.sigma.encode('utf-8')
         
         self.clf = cPickle.loads(pickled_classifier)
         self.theta_ = cPickle.loads(pickled_theta)
         self.sigma_ = cPickle.loads(pickled_sigma)
             
     except Exception as inst:
         logger.error("Could not load learned user model due to unknown"
                      " error %s: %s" % (type(inst), inst))
Example #6
def create_function_from_source(function_source, imports=None):
    """Return a function object from a function source

    Parameters
    ----------
    function_source : pickled string
        string in pickled form defining a function
    imports : list of strings
        list of import statements in string form that allow the function
        to be executed in an otherwise empty namespace
    """
    ns = {}
    import_keys = []
    try:
        if imports is not None:
            for statement in imports:
                exec statement in ns
            import_keys = ns.keys()

        exec loads(function_source) in ns

    except Exception, msg:
        msg = str(msg) + '\nError executing function:\n %s\n'%function_source
        msg += '\n'.join(["Functions in connection strings have to be standalone.",
                          "They cannot be declared either interactively or inside",
                          "another function or inline in the connect string. Any",
                          "imports should be done inside the function"
                           ])
        raise RuntimeError(msg)
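The excerpt stops at the error path; the full helper goes on to return the function object defined in ns. A hypothetical usage sketch:

from pickle import dumps

# pickle the function's source text, then rebuild the function from it
src = "def double(x):\n    return 2 * x\n"
double = create_function_from_source(dumps(src), imports=["import math"])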
Example #7
    def __init__(self, item):
        # Load a specific watermark method from a xml file

        # get class name, method name and method descriptor
        self.__class_name = item.getAttribute("class")
        self.__name = unescape(item.getAttribute("name"))
        self.__descriptor = item.getAttribute("descriptor")

        self.__wms = []

        # get the threshold
        th = int(item.getElementsByTagName("threshold")[0].firstChild.data)

        # load the pickled 'sss' data used by the DWBO check
        x = base64.b64decode(item.getElementsByTagName("sss")[0].firstChild.data)
        self.__dwbo = DWBOCheck(cPickle.loads(x), th)

        for s_item in item.getElementsByTagName("wm"):
            _type = str(s_item.getAttribute("type"))

            # load the context of the original watermark
            if WM_BIND[_type][1] == WM_CLASS:
                wb = WM_BIND[_type][0](None, None)
            else:
                wb = WM_BIND[_type][0](None, None, None)

            x = cPickle.loads(base64.b64decode(s_item.firstChild.data))
            wb.set_context(x)

            self.__wms.append((_type, wb))
Example #8
    def setxattr(self, id_, name, value):

        # Handle S3QL commands
        if id_ == CTRL_INODE:
            if name == b"s3ql_flushcache!":
                self.cache.clear()
            elif name == "copy":
                self.copy_tree(*struct.unpack("II", value))
            elif name == "upload-meta":
                if self.upload_event is not None:
                    self.upload_event.set()
                else:
                    raise llfuse.FUSEError(errno.ENOTTY)
            elif name == "lock":
                self.lock_tree(*pickle.loads(value))
            elif name == "rmtree":
                self.remove_tree(*pickle.loads(value))
            elif name == "logging":
                update_logging(*pickle.loads(value))
            elif name == "cachesize":
                self.cache.max_size = pickle.loads(value)
            else:
                raise llfuse.FUSEError(errno.EINVAL)
        else:
            if self.inodes[id_].locked:
                raise FUSEError(errno.EPERM)

            self.db.execute(
                "INSERT OR REPLACE INTO ext_attributes (inode, name, value) " "VALUES(?, ?, ?)", (id_, name, value)
            )
            self.inodes[id_].ctime = time.time()
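Client side, each control command writes pickled arguments to an extended attribute on the filesystem's control inode, so the bytes must match what the pickle.loads() calls above expect. A minimal sketch (the inode number and the xattr-writing mechanism are illustrative):

import pickle

root_inode_id = 42                      # hypothetical inode to remove
value = pickle.dumps((root_inode_id,))  # matches remove_tree(*pickle.loads(value))
# `value` would then be written to the "rmtree" xattr on the control file,
# e.g. via an xattr library appropriate to the platform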
Example #9
  def testNupicRandomPickling(self):
    """Test pickling / unpickling of NuPIC randomness."""

    # Simple test: make sure that dumping / loading works...
    r = Random(42)
    pickledR = pickle.dumps(r)

    test1 = [r.getUInt32() for _ in xrange(10)]
    r = pickle.loads(pickledR)
    test2 = [r.getUInt32() for _ in xrange(10)]

    self.assertEqual(test1, test2,
                     "Simple NuPIC random pickle/unpickle failed.")

    # A little trickier: dump / load _after_ some numbers have been generated
    # (in the first test).  Things should still work...
    # ...the idea of this test is to make sure that the pickle code isn't just
    # saving the initial seed...
    pickledR = pickle.dumps(r)

    test3 = [r.getUInt32() for _ in xrange(10)]
    r = pickle.loads(pickledR)
    test4 = [r.getUInt32() for _ in xrange(10)]

    self.assertEqual(
        test3, test4,
        "NuPIC random pickle/unpickle didn't work for saving later state.")

    self.assertNotEqual(test1, test3,
                        "NuPIC random gave the same result twice?!?")
Example #10
 def test_copying(self):
     # Check that counters are copyable, deepcopyable, picklable, and
      # have a repr/eval round-trip
     words = Counter('which witch had which witches wrist watch'.split())
     update_test = Counter()
     update_test.update(words)
     for i, dup in enumerate([
                 words.copy(),
                 copy.copy(words),
                 copy.deepcopy(words),
                 pickle.loads(pickle.dumps(words, 0)),
                 pickle.loads(pickle.dumps(words, 1)),
                 pickle.loads(pickle.dumps(words, 2)),
                 pickle.loads(pickle.dumps(words, -1)),
                 cPickle.loads(cPickle.dumps(words, 0)),
                 cPickle.loads(cPickle.dumps(words, 1)),
                 cPickle.loads(cPickle.dumps(words, 2)),
                 cPickle.loads(cPickle.dumps(words, -1)),
                 eval(repr(words)),
                 update_test,
                 Counter(words),
                 ]):
         msg = (i, dup, words)
         self.assertTrue(dup is not words)
         self.assertEquals(dup, words)
         self.assertEquals(len(dup), len(words))
         self.assertEquals(type(dup), type(words))
Example #11
 def _shelf_callback(priKey, priData, realCallback=callback):
     # Safe in Python 2.x because the expression short-circuits
     if sys.version_info[0] < 3 or isinstance(priData, bytes) :
         data = cPickle.loads(priData)
     else :
         data = cPickle.loads(bytes(priData, "iso8859-1"))  # 8 bits
     return realCallback(priKey, data)
Example #12
    def testOldPickles(self):
        # Ensure that applications serializing pytz instances as pickles
        # have no trouble upgrading to a new pytz release. These pickles
        # were created with pytz2006j
        east1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
            "I0\nS'EST'\np3\ntRp4\n."
            ))
        east2 = pytz.timezone('US/Eastern').localize(
            datetime(2006, 1, 1)).tzinfo
        self.assertTrue(east1 is east2)

        # Confirm changes in name munging between 2006j and 2007c cause
        # no problems.
        pap1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
            "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
        pap2 = pytz.timezone('America/Port-au-Prince').localize(
            datetime(1910, 1, 1)).tzinfo
        self.assertTrue(pap1 is pap2)

        gmt1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
        gmt2 = pytz.timezone('Etc/GMT+10')
        self.assertTrue(gmt1 is gmt2)
Example #13
def recieveTables(s):    
    while True:
        print "Accepting"
        conn, addr = s.accept()
        print 'New connection from ', addr
        while True:
            try:
                lock.acquire()
                size1 = conn.recv(1024)
                size2 = conn.recv(1024)
                size1 = pickle.loads(size1)
                size2 = pickle.loads(size2)
                
                table1 = conn.recv(size1)
                table2 = conn.recv(size2)
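                # NOTE: socket.recv(n) may return fewer than n bytes; a robust
                # version would loop until each full pickled payload arrives.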
                table1 = pickle.loads(table1)
                table2 = pickle.loads(table2)
                
                if table1 == 'END' or table2 == 'END':
                    print "Close"
                    conn.send("**END**")
                    conn.close()
                    break
                lock.release()
                tablesFetched.release()
            except Exception:
                conn.close()
                sys.exit()
    return
Example #14
    def ajax_POST_send_message(self, urlparser, post):
        """
        Callback method for sending messages from the webui via the ajax app.

        You can pass it serialized callbacks with args, and either choose
        to send a message to a given reporter or to an anonymous one,
        identified by a backend and an identity.
        """
        
        pre_send_callback = pickle.loads(post.get('pre_send_callback', 
                                            PICKLED_LAMBDA)) or return_none
        pre_send_callback_kwargs = pickle.loads(post.get('pre_send_callback_kwargs', 
                                                         PICKLED_DICT)) or {}
                                        
        post_send_callback = pickle.loads(post.get('post_send_callback', 
                                                   PICKLED_LAMBDA)) or return_none
        post_send_callback_kwargs = pickle.loads(post.get('post_send_callback_kwargs', 
                                                           PICKLED_DICT)) or {}
                                        
        
        backend_slug = post.get('backend', '')
        identity = post.get('identity', '')
        
        if not backend_slug or not identity:
            try:
                rep = Reporter.objects.get(pk=post.get('reporter', -1))
            except Reporter.DoesNotExist:
                raise DirectSmsError(u"You need to specify either a valid "\
                                     u"reporter or a valid backend & identity")
            else:
                pconn = rep.connection()

                # abort if we don't know where to send the message to
                # (if the device the reporter registered with has been
                # taken by someone else, or was created in the WebUI)
                if not pconn:
                   raise DirectSmsError("%s is unreachable (no connection)" % rep)
                   
                backend_slug = pconn.backend.slug
                identity = pconn.identity

        # attempt to send the message
        be = self.router.get_backend(backend_slug)
        
        if not be:
            raise DirectSmsError(u"The backend '%s' is not installed. "\
                                 u"Check your 'local.ini' file." \
                                 % backend_slug )
        
        message = be.message(identity, post["text"])
        
        pre_send_callback(outgoing_message=message, **pre_send_callback_kwargs)
        
        sent = message.send()
        
        post_send_callback(outgoing_message=message, **post_send_callback_kwargs)

        # attempt to call the callback 
        
        return sent
Example #15
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname),
                             _byte_string('?'*len(tzname)))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)

        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42

        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)

        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
Example #16
def _report_variant_pathways(c, args, idx_to_sample):
    
    (agn_paths, hgnc_paths, ensembl_paths) = get_pathways(args)
    
    for r in c:
        gt_types = np.array(cPickle.loads(zlib.decompress(r['gt_types'])))
        gts      = np.array(cPickle.loads(zlib.decompress(r['gts'])))        
        gene     = str(r['gene'])
        trans    = str(r['transcript'])
        
        pathways = []
        if (gene, trans) in agn_paths:
            pathways = _get_pathways(gene, trans, agn_paths[(gene, trans)], 
                            allow_none=False)
        elif (gene, trans) in hgnc_paths:
            pathways = _get_pathways(gene, trans, hgnc_paths[(gene, trans)],
                            allow_none=False)
        elif (gene, trans) in ensembl_paths:
            pathways = _get_pathways(gene, trans, ensembl_paths[(gene, trans)],
                            allow_none=False)
        pathlist = ",".join(pathways)
        for idx, gt_type in enumerate(gt_types):
            if (gt_type == HET or gt_type == HOM_ALT) and \
                len(pathways) > 0:
                print "\t".join([r['chrom'], str(r['start']), str(r['end']), \
                                 r['ref'], r['alt'], r['impact'], \
                                 idx_to_sample[idx], gts[idx], gene, trans, \
                                 pathlist])
Example #17
    def statusUpdate(self, driver, status):
        tid = status.task_id.value
        state = status.state
        logger.debug("status update: %s %s", tid, state)

        jid = self.taskIdToJobId.get(tid)
        if jid in self.activeJobs:
            if self.isFinished(state):
                del self.taskIdToJobId[tid]
                self.jobTasks[jid].remove(tid)
                slave_id = self.taskIdToSlaveId[tid]
                self.slaveTasks[slave_id] -= 1
                del self.taskIdToSlaveId[tid]
           
                if state in (mesos_pb2.TASK_FINISHED, mesos_pb2.TASK_FAILED) and status.data:
                    try:
                        tid,reason,result,accUpdate = cPickle.loads(status.data)
                        if result:
                            flag, data = result
                            if flag >= 2:
                                data = open(data).read()
                                flag -= 2
                            if flag == 0:
                                result = marshal.loads(data)
                            else:
                                result = cPickle.loads(data)
                        return self.activeJobs[jid].statusUpdate(tid, state, 
                            reason, result, accUpdate)
                    except EOFError, e:
                        logger.warning("error when cPickle.loads(): %s, data:%s", e, len(status.data))

                # killed, lost, load failed
                tid = int(tid.split(':')[1])
                self.activeJobs[jid].statusUpdate(tid, state, status.data)
                self.slaveFailed[slave_id] = self.slaveFailed.get(slave_id,0) + 1
Example #18
    def test_pickle(self):
        from numpy import dtype, zeros
        import sys
        try:
            from numpy.core.multiarray import scalar
        except ImportError:
            # running on dummy module
            from numpy import scalar
        from cPickle import loads, dumps
        i = dtype('int32').type(1337)
        f = dtype('float64').type(13.37)
        c = dtype('complex128').type(13 + 37.j)

        swap = lambda s: (''.join(reversed(s))) if sys.byteorder == 'big' else s
        assert i.__reduce__() == (scalar, (dtype('int32'), swap('9\x05\x00\x00')))
        assert f.__reduce__() == (scalar, (dtype('float64'), swap('=\n\xd7\xa3p\xbd*@')))
        assert c.__reduce__() == (scalar, (dtype('complex128'), swap('\x00\x00\x00\x00\x00\x00*@') + \
                                                                swap('\x00\x00\x00\x00\x00\x80B@')))

        assert loads(dumps(i)) == i
        assert loads(dumps(f)) == f
        assert loads(dumps(c)) == c

        a = zeros(3)
        assert loads(dumps(a.sum())) == a.sum()
Example #19
 def test_pickling(self):
     """intbitset - pickling"""
     import cPickle
     for set1 in self.sets + [[]]:
         self.assertEqual(intbitset(set1), cPickle.loads(cPickle.dumps(intbitset(set1), -1)))
     for set1 in self.sets + [[]]:
         self.assertEqual(intbitset(set1, trailing_bits=True), cPickle.loads(cPickle.dumps(intbitset(set1, trailing_bits=True), -1)))
Example #20
    def get_notifications(self, chname, keys=None, reset=False):
        """
        Returns list of notifications stored in Redis DB

        :param str chname: Channel name where to retrieve notifications
        :param bool reset: Reset notifications after returning
        :param list keys: Optional list of fields to fetch
        :returns:

        >>> ns = self.env[NotificationSystem]
        >>> ns.get_notifications('uid-123')
        [{message-123:{notification}, message-4:{notification}}]
        >>> ns.get_notifications('uid-123', keys=['message-4'])
        [{message-4:{notification}}]

        """
        notifications = []
        redis = self._get_redis()

        # Iterate through keys and retrieve the hashes with them
        if keys:
            notifications = [pickle.loads(notification) for notification in list(redis.hmget(chname, keys)) if notification]
            if reset:
                redis.hdel(chname, *keys)

        else:
            notifications = [pickle.loads(notification) for key, notification in redis.hgetall(chname).items()]
            if reset:
                redis.delete(chname)

        return notifications
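The write side isn't shown; presumably each notification is stored as one pickled hash field per message id, along the lines of this sketch (function and argument names assumed):

import pickle

def store_notification(redis, chname, message_id, notification):
    # mirror of get_notifications(): one pickled value per hash field
    redis.hset(chname, message_id, pickle.dumps(notification))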
Example #21
def test_ttest_ind():
    "Test testnd.ttest_ind()"
    ds = datasets.get_uts(True)

    # basic
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds)
    repr(res)
    assert_less(res.p_uncorrected.min(), 0.05)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    repr(res_)
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # cluster
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds, tail=1, samples=1)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # nd
    res = testnd.ttest_ind('utsnd', 'A', 'a1', 'a0', ds=ds, pmin=0.05, samples=2)
    eq_(res._cdist.n_clusters, 10)
Example #22
    def test_pickle_dump_load(self):
        # Wipe current cache
        DescriptorMemoryElement.MEMORY_CACHE = {}

        # Make a couple descriptors
        v1 = numpy.array([1, 2, 3])
        d1 = DescriptorMemoryElement('test', 0)
        d1.set_vector(v1)

        v2 = numpy.array([4, 5, 6])
        d2 = DescriptorMemoryElement('test', 1)
        d2.set_vector(v2)

        ntools.assert_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)

        d1_s = cPickle.dumps(d1)
        d2_s = cPickle.dumps(d2)

        # Wipe cache again
        DescriptorMemoryElement.MEMORY_CACHE = {}
        ntools.assert_not_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_not_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)

        # Attempt reconstitution
        d1_r = cPickle.loads(d1_s)
        d2_r = cPickle.loads(d2_s)

        numpy.testing.assert_array_equal(v1, d1_r.vector())
        numpy.testing.assert_array_equal(v2, d2_r.vector())

        # Cache should now have those entries back in it
        ntools.assert_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)
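For the cache entries to reappear after cPickle.loads(), the element class presumably re-registers itself while unpickling; a minimal sketch of that pattern (not necessarily the library's actual implementation):

class CacheRegistered(object):
    MEMORY_CACHE = {}

    def __init__(self, type_str, uuid):
        self._key = (type_str, uuid)
        self.MEMORY_CACHE[self._key] = self

    def __getstate__(self):
        return self._key

    def __setstate__(self, state):
        self._key = state
        # re-insert into the class-level cache on unpickle
        self.MEMORY_CACHE[self._key] = self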
Example #23
	def trendcounter90(self):
		self.closeMysqlConn()
		self.initMysqlConns()

		qry="""SELECT trenddate, pickledump 
				FROM `trendhisory` AS a, (SELECT MIN(trenddate) AS mini, MAX(trenddate) AS maxi FROM `trendhisory` WHERE trenddate > NOW()-INTERVAL """+str(3*self.CHANGE_COMPARE_DAYS)+""" DAY) AS m
				WHERE  m.maxi = a.trenddate
				OR m.mini = a.trenddate ORDER BY trenddate"""
		self.c.execute(qry)
		crows = self.c.fetchall()
		if not len(crows) == 2:
			print "crows != 2, exit."
			return
		min_row=crows[0]
		max_row=crows[1]
		min_date=str(min_row[0])
		min_pickle=pickle.loads(str(min_row[1]))
		max_date=str(max_row[0])
		max_pickle=pickle.loads(str(max_row[1]))
		for i,j in max_pickle.iteritems():
			max_termcnt=j
			if i in min_pickle:
				min_termcnt=min_pickle[i]
				cur_trend=min_termcnt[1]-max_termcnt[1]
				print min_date, i, " / ",str(min_termcnt[1]), " => ", max_date, " / ",str(max_termcnt[1]), "\t change:", str(cur_trend)
				if not cur_trend:
					cur_trend=0
				uqry="UPDATE `mimox_index` SET TermTrend90 = '%s', TermTrendChangeDate90='%s' WHERE TermName = '%s'" % (cur_trend, self.NOW, i)
				try:
					self.c.execute(uqry)
				except Exception,e:
					print e
					pass
Example #24
def test_anova():
    "Test testnd.anova()"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(True)

    testnd.anova('utsnd', 'A*B', ds=ds)

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=0, pmin=0.05)
    repr(res)
    p = plot.Array(res)
    p.close()

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=2, pmin=0.05)
    repr(res)
    p = plot.Array(res)
    p.close()

    # persistence
    string = pickle.dumps(res, protocol=pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))

    # test multi-effect results (with persistence)
    # UTS
    res = testnd.anova('uts', 'A*B*rm', ds=ds, samples=5)
    repr(res)
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res = pickle.loads(string)
    tfce_clusters = res.tfce_clusters(pmin=0.05)
    peaks = res.tfce_peaks()
    assert_equal(tfce_clusters.eval("p.min()"), peaks.eval("p.min()"))
    unmasked = res.f[0]
    masked = res.masked_parameter_map(effect=0, pmin=0.05)
    assert_array_equal(masked.x <= unmasked.x, True)
Example #25
  def testArtistRatingPickle(self):
    Factories.clear()
    trackA= TrackFactory.get(self.artist, self.titleA)
    trackB = TrackFactory.get(self.artist, self.titleB)
    artist = trackA.artist
    trackA.played()
    trackB.skipped()
    rating = artist.rating
    i = 0
    for r in RatingFactory.ratings():
      i+=1
    # track init rating: 1
    # artist init rating: 2
    # track play rating: 3
    # artist play rating: 4
    # track skip rating: 5
    # artist skip rating: 6
    self.assertEqual(i, 6)

    state = pickle.dumps(Factories.getstate())
    Factories.clear()
    pickle.loads(state)

    i = 0
    for r in RatingFactory.ratings():
      i+=1
    self.assertEqual(i, 6)

    track = TrackFactory.get(self.artist, self.titleA)
    artist = track.artist
    self.assertEqual(artist.rating, rating)
Example #26
  def testSerialization(self):
    params = {
      'inputDimensions' : [2,4,5,2],
      'columnDimensions' : [4,3,3],
      'potentialRadius' : 30,
      'potentialPct' : 0.7,
      'globalInhibition' : False,
      'localAreaDensity' : 0.23,
      'numActiveColumnsPerInhArea' : 0,
      'stimulusThreshold' : 2,
      'synPermInactiveDec' : 0.02,
      'synPermActiveInc' : 0.1,
      'synPermConnected' : 0.12,
      'minPctOverlapDutyCycle' : 0.011,
      'minPctActiveDutyCycle' : 0.052,
      'dutyCyclePeriod' : 25,
      'maxBoost' : 11.0,
      'seed' : 19,
      'spVerbosity' : 0
    }
    sp1 = self.createSp("py", params)
    sp2 = pickle.loads(pickle.dumps(sp1))
    self.compare(sp1, sp2)

    sp1 = self.createSp("cpp", params)
    sp2 = pickle.loads(pickle.dumps(sp1))
    self.compare(sp1, sp2)
Example #27
 def handle_event_crawler_reply(
     self, permid, selversion, channel_id, channel_data, error, message, request_callback
 ):
     if error:
         if DEBUG:
             print >>sys.stderr, "videoplaybackcrawler: handle_crawler_reply", error, message
         self._file.write(
             "; ".join(
                 (
                     strftime("%Y/%m/%d %H:%M:%S"),
                     "  EVENT REPLY",
                     show_permid(permid),
                     str(error),
                     str(channel_data),
                     message,
                     "\n",
                 )
             )
         )
         self._file.flush()
     elif selversion >= OLPROTO_VER_TENTH:
         if DEBUG:
             print >>sys.stderr, "videoplaybackcrawler: handle_crawler_reply", show_permid_short(permid), len(
                 message
             ), "bytes zipped"
         info = cPickle.loads(zlib.decompress(message))
         self._file.write(
             "; ".join(
                 (
                     strftime("%Y/%m/%d %H:%M:%S"),
                     "  EVENT REPLY",
                     show_permid(permid),
                     str(error),
                     str(channel_data),
                     str(info),
                     "\n",
                 )
             )
         )
         self._file.flush()
     elif selversion >= OLPROTO_VER_EIGHTH:
         if DEBUG:
             print >>sys.stderr, "videoplaybackcrawler: handle_crawler_reply", show_permid_short(
                 permid
             ), cPickle.loads(message)
         info = cPickle.loads(message)
         self._file.write(
             "; ".join(
                 (
                     strftime("%Y/%m/%d %H:%M:%S"),
                     "  EVENT REPLY",
                     show_permid(permid),
                     str(error),
                     str(channel_data),
                     str(info),
                     "\n",
                 )
             )
         )
         self._file.flush()
Example #28
    def handle_data(self):
        self.send_response(200)
        self.end_headers()
        method = self.body['method'].value
        basename = self.body['basename'].value
        args = self.body['args'].value
        kw = self.body['kw'].value
        if args:
            args = cPickle.loads(args)
        else:
            args = []
        if kw:
            kw = cPickle.loads(kw)
        else:
            kw = {}
        if method == "__init__":
            self.open_bases[basename] = Base(basename)
        else:
            if not self.open_bases.has_key(basename):
                self.wfile.write('no base named %s' %basename)
            else:
                if hasattr(self,method):
                    self.db = self.open_bases[basename]
                    getattr(self,method)(*args,**kw)

        self.finish()    
Example #29
    def test_pickling(self):

        fsm = Fysom({
            'initial': 'green',
            'events': [
                {'name': 'warn', 'src': 'green', 'dst': 'yellow'},
                {'name': 'panic', 'src': 'yellow', 'dst': 'red'},
                {'name': 'calm', 'src': 'red', 'dst': 'yellow'},
                {'name': 'clear', 'src': 'yellow', 'dst': 'green'}
            ]
        })

        pickled = dumps(fsm)
        assert pickled
        fsm = loads(pickled)

        self.assertTrue(isinstance(fsm, Fysom))
        self.assertEquals('green', fsm.current)

        fsm.warn()
        pickled = dumps(fsm)
        assert pickled
        fsm = loads(pickled)

        self.assertEquals('yellow', fsm.current)
Example #30
 def get_object(self, id=None,use_cache=True):
     session=self.session()
     raw_id = "o-%s" % tob(id)
     if use_cache:
         try:
             if session.check_obj(raw_id):
                 data = session.get_obj(raw_id)
                 if "_auto_id" in data:
                     return loads(data)
         except:
             log(ERROR,"CACHE read error")
     sql = "SELECT `auto_id`,`id`,`object`,`updated` FROM `"+self.table+"` WHERE `id`=%s"
     rows = session.connection.query(sql, tou(id))
     if rows:
         data = rows[0].object
         objstr = decompress(data)
         obj = loads(objstr)
         obj.update(dict(id=id))
         obj["_updated"] = rows[0].updated
         obj["_auto_id"] =rows[0].auto_id
         if use_cache:
             try:
                 session.set_obj(raw_id,dumps(obj))
             except:
                 log(ERROR,"CACHE write error")
         return obj
Example #31
 def _decode_request(self, encoded_request):
     """Decode an request previously encoded"""
     return request_from_dict(pickle.loads(encoded_request), self.spider)
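The encoder is symmetric; a sketch using Scrapy's request_to_dict helper, assuming the same scrapy-redis style context as above:

from scrapy.utils.reqser import request_to_dict

def _encode_request(self, request):
    """Encode a request object -- the inverse of _decode_request"""
    return pickle.dumps(request_to_dict(request, self.spider))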
Example #32
def start(data):
    # Make sure we are working with an existing SIP record
    try:
        models.SIP.objects.get(pk=data.uuid)
    except models.SIP.DoesNotExist:
        error("UUID not recognized")

    # Get directory
    jobs = models.Job.objects.filter(sipuuid=data.uuid, jobtype="Upload DIP")
    if jobs.count():
        directory = jobs[0].directory.rstrip('/').replace(
            '%sharedPath%', '/var/archivematica/sharedDirectory/')
    else:
        error("Directory not found: %s" % directory)

    # Check if exists
    if os.path.exists(directory) is False:
        log("Directory not found: %s" % directory)

        # Trying with uploadedDIPs
        log("Looking up uploadedDIPs/")
        directory = directory.replace('uploadDIP', 'uploadedDIPs')

        if os.path.exists(directory) is False:
            error("Directory not found: %s" % directory)

    try:
        # This upload was called before, restore Access record
        access = models.Access.objects.get(sipuuid=data.uuid)
    except:  # First time this job is called, create new Access record
        access = models.Access(sipuuid=data.uuid)
        access.save()

    # The target column contains a serialized Python dictionary
    # - target is the permalink string
    try:
        target = cPickle.loads(str(access.target))
        log("Target: %s" % (target['target']))
    except:
        error("No target was selected")

    # Rsync if data.rsync_target option was passed to this script
    if data.rsync_target:
        """ Build command (rsync)
          -a =
            -r = recursive
            -l = recreate symlinks on destination
            -p = set same permissions
            -t = transfer modification times
            -g = set same group owner on destination
            -o = set same user owner on destination (if possible, super-user)
            --devices = transfer character and block device files (only super-user)
            --specials = transfer special files like sockets and fifos
          -z = compress
          -P = --partial + --stats
        """
        # Using rsync -rltzP
        command = [
            "rsync", "--protect-args", "-rltz", "-P", "--chmod=ugo=rwX",
            directory, data.rsync_target
        ]

        # Add -e if data.rsync_command was passed to this script
        if data.rsync_command:
            # Insert in second position. Example: rsync -e "ssh -i key" ...
            command.insert(1, "-e %s" % data.rsync_command)

        log(' '.join(command))

        # Getting around of rsync output buffering by outputting to a temporary file
        pipe_output, file_name = tempfile.mkstemp()
        log("Rsync output is being saved in %s" % file_name)

        # Call Rsync
        process = subprocess.Popen(command,
                                   stdout=pipe_output,
                                   stderr=pipe_output)

        # poll() returns None while the process is still running
        while process.poll() is None:
            time.sleep(1)
            last_line = open(file_name).readlines()

            # It's possible that it hasn't output yet, so continue
            if len(last_line) == 0:
                continue
            last_line = last_line[-1]

            # Matching to "[bytes downloaded]  number%  [speed] number:number:number"
            match = re.match(".* ([0-9]*)%.* ([0-9]*:[0-9]*:[0-9]*).*",
                             last_line)

            if not match:
                continue

            # Update upload status
            # - percentage in match.group(1)
            # - ETA in match.group(2)
            access.status = "Sending... %s (ETA: %s)" % (match.group(1),
                                                         match.group(2))
            access.statuscode = 10
            access.save()
            log(access.status)

        # We don't need the temporary file anymore!
        # log("Removing temporary rsync output file: %s" % file_name)
        # os.unlink(file_name)

        # At this point, we should have a return code
        # If greater than zero, see man rsync (EXIT VALUES)
        access.exitcode = process.returncode
        if 0 < process.returncode:
            access.statuscode = 12
        else:
            access.statuscode = 11
        access.save()

        if 0 < process.returncode:
            error(
                "Rsync quit unexpectedly (exit %s), the upload script will be stopped here"
                % process.returncode)

    # Building headers dictionary for the deposit request
    headers = {}
    headers['User-Agent'] = 'Archivematica'
    headers[
        'X-Packaging'] = 'http://purl.org/net/sword-types/METSArchivematicaDIP'
    """ headers['X-On-Beahalf-Of'] """
    headers['Content-Type'] = 'application/zip'
    headers['X-No-Op'] = 'false'
    headers['X-Verbose'] = 'false'
    headers['Content-Location'] = "file:///%s" % os.path.basename(directory)
    """ headers['Content-Disposition'] """

    # Build URL (expected sth like http://localhost/ica-atom/index.php)
    atom_url_prefix = ';' if data.version == 1 else ''
    data.url = "%s/%ssword/deposit/%s" % (data.url, atom_url_prefix,
                                          target['target'])

    # Auth and request!
    log("About to deposit to: %s" % data.url)
    access.statuscode = 13
    access.resource = data.url
    access.save()
    auth = requests.auth.HTTPBasicAuth(data.email, data.password)

    # Disable redirects: AtoM returns 302 instead of 202, but Location header field is valid
    response = requests.request(
        'POST',
        data.url,
        auth=auth,
        headers=headers,
        allow_redirects=False,
        timeout=mcpclient_settings.AGENTARCHIVES_CLIENT_TIMEOUT)

    # response.{content,headers,status_code}
    log("> Response code: %s" % response.status_code)
    log("> Location: %s" % response.headers.get('Location'))

    if data.debug:
        # log("> Headers sent: %s" % headers)
        # log("> Headers received: %s" % response.headers)
        log("> Content received: %s" % response.content)

    # Check AtoM response status code
    if response.status_code not in [200, 201, 302]:
        error("Response code not expected")

    # The Location header is required; if it is missing from the AtoM
    # response, something went wrong
    if response.headers.get('Location') is None:
        error(
            "A Location header was expected; its absence likely means something is wrong with AtoM"
        )
    else:
        access.resource = data.url

    # (A)synchronously?
    if response.status_code == 302:
        access.status = "Deposited asynchronously, AtoM is processing the DIP in the job queue"
        log(access.status)
    else:
        access.statuscode = 14
        access.status = "Deposited synchronously"
        log(access.status)
    access.save()
Example #33
    def connect(
        self,
        request,
        response,
        db=None,
        tablename='web2py_session',
        masterapp=None,
        migrate=True,
        separate=None,
        check_client=False,
    ):
        """
        separate can be a callable, e.g. separate=lambda session_name: session_name[-2:],
        and it is used to determine a session prefix.
        separate can also be True, in which case it is set to session_name[-2:]
        """
        if separate == True:
            separate = lambda session_name: session_name[-2:]
        self._unlock(response)
        if not masterapp:
            masterapp = request.application
        response.session_id_name = 'session_id_%s' % masterapp.lower()

        if not db:
            if global_settings.db_sessions is True or masterapp in global_settings.db_sessions:
                return
            response.session_new = False
            client = request.client and request.client.replace(':', '.')
            if response.session_id_name in request.cookies:
                response.session_id = \
                    request.cookies[response.session_id_name].value
                if regex_session_id.match(response.session_id):
                    response.session_filename = \
                        os.path.join(up(request.folder), masterapp,
                            'sessions', response.session_id)
                else:
                    response.session_id = None
            if response.session_id:
                try:
                    response.session_file = \
                        open(response.session_filename, 'rb+')
                    try:
                        portalocker.lock(response.session_file,
                                         portalocker.LOCK_EX)
                        response.session_locked = True
                        self.update(cPickle.load(response.session_file))
                        response.session_file.seek(0)
                        oc = response.session_filename.split('/')[-1].split(
                            '-')[0]
                        if check_client and client != oc:
                            raise Exception, "cookie attack"
                    finally:
                        pass
                        #This causes admin login to break. Must find out why.
                        #self._close(response)
                except:
                    response.session_id = None
            if not response.session_id:
                uuid = web2py_uuid()
                response.session_id = '%s-%s' % (client, uuid)
                if separate:
                    prefix = separate(response.session_id)
                    response.session_id = '%s/%s' % (prefix,
                                                     response.session_id)
                response.session_filename = \
                    os.path.join(up(request.folder), masterapp,
                                 'sessions', response.session_id)
                response.session_new = True
        else:
            if global_settings.db_sessions is not True:
                global_settings.db_sessions.add(masterapp)
            response.session_db = True
            if response.session_file:
                self._close(response)
            if settings.global_settings.web2py_runtime_gae:
                # in principle this could work without GAE
                request.tickets_db = db
            if masterapp == request.application:
                table_migrate = migrate
            else:
                table_migrate = False
            tname = tablename + '_' + masterapp
            table = db.get(tname, None)
            if table is None:
                table = db.define_table(
                    tname,
                    db.Field('locked', 'boolean', default=False),
                    db.Field('client_ip', length=64),
                    db.Field('created_datetime',
                             'datetime',
                             default=request.now),
                    db.Field('modified_datetime', 'datetime'),
                    db.Field('unique_key', length=64),
                    db.Field('session_data', 'blob'),
                    migrate=table_migrate,
                )
            try:
                key = request.cookies[response.session_id_name].value
                (record_id, unique_key) = key.split(':')
                if record_id == '0':
                    raise Exception, 'record_id == 0'
                rows = db(table.id == record_id).select()
                if len(rows) == 0 or rows[0].unique_key != unique_key:
                    raise Exception, 'No record'

                # rows[0].update_record(locked=True)

                session_data = cPickle.loads(rows[0].session_data)
                self.update(session_data)
            except Exception:
                record_id = None
                unique_key = web2py_uuid()
                session_data = {}
            response._dbtable_and_field = \
                (response.session_id_name, table, record_id, unique_key)
            response.session_id = '%s:%s' % (record_id, unique_key)
        response.cookies[response.session_id_name] = response.session_id
        response.cookies[response.session_id_name]['path'] = '/'
        self.__hash = hashlib.md5(str(self)).digest()
        if self.flash:
            (response.flash, self.flash) = (self.flash, None)
Example #34
    def fit_models(self, X, y):
        """Perform internal cross-validation fit"""

        if (self.verbose):
            sys.stderr.write('\nfitting models:\n')

        if (self.use_bootstrap):
            n = X.shape[0]
            rs = check_random_state(self.random_state)
            self._folds = [_bootstraps(n, rs) for _ in xrange(self.n_folds)]
        else:
            self._folds = list(KFold(len(y), n_folds=self.n_folds))

        select_stmt = "select pickled_model from models where model_idx = ?"
        insert_stmt = """insert into fitted_models
                             (model_idx, fold_idx, pickled_model)
                         values (?,?,?)"""

        db_conn = sqlite3.connect(self.db_file)
        curs = db_conn.cursor()

        for model_idx in xrange(self._n_models):

            curs.execute(select_stmt, [model_idx])
            pickled_model = curs.fetchone()[0]
            model = loads(str(pickled_model))

            model_folds = []

            for fold_idx, fold in enumerate(self._folds):
                train_inds, _ = fold
                if self.sweight:
                    model.fit(X[train_inds],
                              y[train_inds],
                              sample_weight=X[train_inds, self.sweight])
                    '''
                    if str(self.models[model_idx].__class__)[8:15]=='sklearn':
                        model.fit(X[train_inds], y[train_inds], sample_weight=X[train_inds, self.sweight])
                    elif str(self.models.__class__)[8:15]=='xgboost':
                        model.fit(X[train_inds], y[train_inds], weight=X[train_inds, self.sweight])
                    '''
                else:
                    model.fit(X[train_inds], y[train_inds])
                pickled_model = buffer(dumps(model, protocol=2))
                model_folds.append((model_idx, fold_idx, pickled_model))

            with db_conn:
                db_conn.executemany(insert_stmt, model_folds)

            if (self.verbose):
                if ((model_idx + 1) % 50 == 0):
                    sys.stderr.write('%d\n' % (model_idx + 1))
                else:
                    sys.stderr.write('.')

        if (self.verbose):
            sys.stderr.write('\n')

        with db_conn:
            stmt = """create index fitted_models_index
                      on fitted_models (model_idx, fold_idx)"""

            db_conn.execute(stmt)

        db_conn.close()
Example #35
    def _init_db(self, models):
        """Initialize database"""

        # db setup script
        _createTablesScript = """
            create table models (
                model_idx      integer UNIQUE NOT NULL,
                pickled_model  blob NOT NULL
            );

            create table fitted_models (
                model_idx      integer NOT NULL,
                fold_idx       integer NOT NULL,
                pickled_model  blob NOT NULL
            );

            create table model_scores (
                model_idx      integer UNIQUE NOT NULL,
                score          real NOT NULL,
                probs          blob NOT NULL
            );

            create table ensemble (
                model_idx      integer NOT NULL,
                weight         integer NOT NULL
            );
        """

        # barf if db file exists and we're making a new model
        if (models and os.path.exists(self.db_file)):
            raise ValueError("db_file '%s' already exists!" % self.db_file)

        db_conn = sqlite3.connect(self.db_file)
        with db_conn:
            db_conn.execute("pragma journal_mode = off")
            # db_conn.execute("PRAGMA auto_vacuum = FULL;")

        if (models):
            # build database
            with db_conn:
                db_conn.executescript(_createTablesScript)

            # populate model table
            insert_stmt = """insert into models (model_idx, pickled_model)
                             values (?, ?)"""
            with db_conn:
                vals = ((i, buffer(dumps(m, protocol=2)))
                        for i, m in enumerate(models))
                db_conn.executemany(insert_stmt, vals)
                create_stmt = "create index models_index on models (model_idx)"
                db_conn.execute(create_stmt)

            self._n_models = len(models)

        else:
            curs = db_conn.cursor()
            curs.execute("select count(*) from models")
            self._n_models = curs.fetchone()[0]

            curs.execute("select model_idx, weight from ensemble")
            for k, v in curs.fetchall():
                self._ensemble[k] = v

            # clumsy hack to get n_classes
            curs.execute("select probs from model_scores limit 1")
            r = curs.fetchone()
            probs = loads(str(r[0]))
            self._n_classes = probs.shape[1]

        db_conn.close()
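Reading a fitted model back out follows the same blob convention; a minimal helper sketch using the table and column names from the statements above:

import sqlite3
from cPickle import loads

def load_fitted_model(db_file, model_idx, fold_idx):
    # fetch and unpickle one per-fold model from fitted_models
    db_conn = sqlite3.connect(db_file)
    curs = db_conn.cursor()
    curs.execute("select pickled_model from fitted_models "
                 "where model_idx = ? and fold_idx = ?",
                 (model_idx, fold_idx))
    row = curs.fetchone()
    db_conn.close()
    return loads(str(row[0])) if row else None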
Example #36
 def get(self, key):
     rv = self._uwsgi.cache_get(key, self.cache)
     if rv is None:
         return
     return pickle.loads(rv)
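The matching setter would pickle before storing; a sketch against uWSGI's documented cache_update signature (the expiry argument is optional):

def set(self, key, value, expires=0):
    # mirror of get(): store the pickled value with an optional expiry
    self._uwsgi.cache_update(key, pickle.dumps(value), expires, self.cache)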
Example #37
 def result(self):
     return cPickle.loads(
         self._host.ssh.ftp.getContents("/tmp/result%s.pickle" %
                                        self._unique))
Example #38
import cPickle as pickle


class SomeObject(object):
    def __eq__(self, other):
        """Default __eq__ would always return False."""
        return True



a = (1, 2)
b = "zwei"
c = [3, 4, 5, 6]
d = c
e = SomeObject()
f = e
before_pickle = [a, b, c, d, e, f]

after_pickle = pickle.loads(pickle.dumps(before_pickle))

# should get equal objects, but not the same:
for i in range(6):
    assert before_pickle[i] == after_pickle[i]
    assert id(before_pickle[i]) != id(after_pickle[i])

# same objects stay the same:
assert id(after_pickle[2]) == id(after_pickle[3])
assert id(after_pickle[4]) == id(after_pickle[5])
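Aliasing is only preserved within a single dumps() call; pickling the same object through two separate calls yields equal but independent copies:

g = pickle.loads(pickle.dumps(c))
h = pickle.loads(pickle.dumps(c))
assert g == h                  # equal contents
assert id(g) != id(h)          # but distinct objects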
Example #39
 def test_pickle_unpickle_empty(self):
     """Test that an empty AtomGroup can be pickled/unpickled (Issue 293)"""
     ag = AtomGroup([])
     pickle_str = cPickle.dumps(ag, protocol=cPickle.HIGHEST_PROTOCOL)
     newag = cPickle.loads(pickle_str)
     assert_equal(len(newag), 0)
Example #40
ap.add_argument(
    "-s",
    "--max-buffer-size",
    type=int,
    default=100,
    help="Maximum buffer size for # of features to be stored in memory")
ap.add_argument("-l",
                "--levels",
                type=int,
                default=2,
                help="# of pyramid levels to generate")
args = vars(ap.parse_args())

# load the codebook vocabulary, then initialize the bag-of-visual-words transformer
# and the pyramid of bag-of-visual-words descriptor
vocab = cPickle.loads(open(args["codebook"]).read())
bovw = BagOfVisualWords(vocab)
pbow = PBOW(bovw, numLevels=args["levels"])

# open the features database and initialize the bag-of-visual-words indexer
featureDim = PBOW.featureDim(bovw.codebook.shape[0], args["levels"])
featuresDB = h5py.File(args["features_db"], mode="r")
bi = BOVWIndexer(featureDim,
                 args["pbow_db"],
                 estNumImages=featuresDB["image_ids"].shape[0],
                 maxBufferSize=args["max_buffer_size"])

# loop over the image IDs
for (i, imageID) in enumerate(featuresDB["image_ids"]):
    # grab the image dimensions, along with the index lookup values from the
    # database
Example #41
    def __init__(self, *args):
        BaseComponents.BlissWidget.__init__(self, *args)

        # map displayed string in the history list -> actual file path
        self.history_map = dict()

        self.layout = QVBoxLayout(self)

        self.defineSlot("load_file", ())
        self.defineSlot("login_changed", ())
        self.addProperty("mnemonic", "string", "")
        self.addProperty("history", "string", "", hidden=True)
        self.addProperty("sessions ttl (in days)", "integer", "30")

        # make sure the history property is a pickled dict
        try:
            hist = pickle.loads(self.getProperty("history").getValue())
        except BaseException:  # EOFError if the string is empty but let's not count on it
            self.getProperty("history").setValue(pickle.dumps(dict()))

        # maybe defer that for later
        self.cleanup_history()

        self.main_layout = QSplitter(self)
        self.main_layout.setSizePolicy(
            QSizePolicy(QSizePolicy.MinimumExpanding,
                        QSizePolicy.MinimumExpanding))

        # left part of the splitter
        self.history_box = QVBox(self.main_layout)
        self.history_box.setSizePolicy(QSizePolicy.Preferred,
                                       QSizePolicy.Preferred)

        self.sort_order = True

        self.sort_col = None

        self.history = QTable(self.history_box)
        self.history.setSizePolicy(
            QSizePolicy(QSizePolicy.Minimum, QSizePolicy.MinimumExpanding))
        self.history.setSelectionMode(QTable.SingleRow)
        self.history.setNumCols(3)
        self.history.verticalHeader().hide()
        self.history.setLeftMargin(0)
        self.history.setSorting(False)
        QObject.connect(self.history, SIGNAL("currentChanged(int,int)"),
                        self.history_changed)

        # by default sorting only sorts the columns and not whole rows.
        # let's reimplement that
        QObject.connect(self.history.horizontalHeader(),
                        SIGNAL("clicked(int)"), self.sort_column)

        header = self.history.horizontalHeader()
        header.setLabel(0, "Time and date")
        header.setLabel(1, "Prefix")
        header.setLabel(2, "Run number")

        self.clear_history_button = QPushButton("Clear history",
                                                self.history_box)
        self.history_box.setSizePolicy(QSizePolicy.Preferred,
                                       QSizePolicy.Fixed)
        QObject.connect(self.clear_history_button, SIGNAL("clicked()"),
                        self.clear_history)

        # Right part of the splitter
        self.browser_box = QWidget(self.main_layout)
        QVBoxLayout(self.browser_box)
        self.browser_box.setSizePolicy(QSizePolicy.MinimumExpanding,
                                       QSizePolicy.MinimumExpanding)

        self.top_layout = QHBoxLayout(self.browser_box)

        self.back_button = QToolButton(self.browser_box)
        self.back_button.setIconSet(QIconSet(Icons.load("Left2")))
        self.back_button.setTextLabel("Back")
        self.back_button.setUsesTextLabel(True)
        self.back_button.setTextPosition(QToolButton.BelowIcon)
        self.back_button.setSizePolicy(
            QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum))

        self.forward_button = QToolButton(self.browser_box)
        self.forward_button.setIconSet(QIconSet(Icons.load("Right2")))
        self.forward_button.setTextLabel("Forward")
        self.forward_button.setUsesTextLabel(True)
        self.forward_button.setTextPosition(QToolButton.BelowIcon)
        self.forward_button.setSizePolicy(
            QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum))

        self.top_layout.addWidget(self.back_button)
        self.top_layout.addWidget(self.forward_button)

        self.browser_box.layout().addLayout(self.top_layout)

        self.browser = QTextBrowser(self.browser_box)
        self.browser.setReadOnly(True)
        self.browser_box.layout().addWidget(self.browser)

        self.layout.addWidget(self.main_layout)

        # initially disabled
        self.forward_button.setEnabled(False)
        self.back_button.setEnabled(False)
        # connections
        QObject.connect(self.browser, SIGNAL("backwardAvailable(bool)"),
                        self.back_button.setEnabled)
        QObject.connect(
            self.browser,
            SIGNAL("forwardAvailable(bool)"),
            self.forward_button.setEnabled,
        )
        QObject.connect(self.back_button, SIGNAL("clicked()"),
                        self.browser.backward)
        QObject.connect(self.forward_button, SIGNAL("clicked()"),
                        self.browser.forward)

        self.edna = None
Example #42
 def assertFileData(self, file_path, data):
     with open(file_path, 'rb') as fp:
         fdata = fp.read()
         self.assertEquals(pickle.loads(fdata), pickle.loads(data))
Example #43
		print "Testing sending a unicode-string key...",
		try:
			x = mc.set(u'keyhere', 1)
		except Client.MemcachedStringEncodingError, msg:
			print "OK",
		else:
			print "FAIL",
		try:
			x = mc.set((u'a'*SERVER_MAX_KEY_LENGTH).encode('utf-8'), 1)
		except:
			print "FAIL",
		else:
			print "OK",
		import pickle
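		# protocol 0 pickle that decodes to the single unicode character u'\u4f1a'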
		s = pickle.loads('V\\u4f1a\np0\n.')
		try:
			x = mc.set((s*SERVER_MAX_KEY_LENGTH).encode('utf-8'), 1)
		except Client.MemcachedKeyLengthError:
			print "OK"
		else:
			print "FAIL"

		print "Testing using a value larger than the memcached value limit...",
		x = mc.set('keyhere', 'a'*SERVER_MAX_VALUE_LENGTH)
		if mc.get('keyhere') == None:
			print "OK",
		else:
			print "FAIL",
		x = mc.set('keyhere', 'a'*SERVER_MAX_VALUE_LENGTH + 'aaa')
		if mc.get('keyhere') == None:
Example #44
def view_thread():
    '''image viewing thread - this runs on the ground station'''
    from cuav.lib import cuav_mosaic
    state = mpstate.camera_state

    bsend = block_xmit.BlockSender(state.settings.gcs_view_port,
                                   bandwidth=state.settings.bandwidth)
    state.bsocket = MavSocket(mpstate.mav_master[0])
    state.bsend2 = block_xmit.BlockSender(mss=96,
                                          sock=state.bsocket,
                                          dest_ip='mavlink',
                                          dest_port=0,
                                          backlog=5,
                                          debug=False)
    state.bsend2.set_bandwidth(state.settings.bandwidth2)

    view_window = False
    image_count = 0
    thumb_count = 0
    image_total_bytes = 0
    jpeg_total_bytes = 0
    thumb_total_bytes = 0
    region_count = 0
    mosaic = None
    thumbs_received = set()
    view_dir = os.path.join(state.camera_dir, "view")
    thumb_dir = os.path.join(state.camera_dir, "thumb")
    cuav_util.mkdir_p(view_dir)
    cuav_util.mkdir_p(thumb_dir)

    img_window = mp_image.MPImage(title='Camera')

    mpstate.console.set_status('Images', 'Images %u' % image_count, row=6)
    mpstate.console.set_status('Lost', 'Lost %u' % 0, row=6)
    mpstate.console.set_status('Regions', 'Regions %u' % region_count, row=6)
    mpstate.console.set_status('JPGSize', 'JPGSize %.0f' % 0.0, row=6)
    mpstate.console.set_status('XMITQ', 'XMITQ %.0f' % 0.0, row=6)

    mpstate.console.set_status('Thumbs', 'Thumbs %u' % thumb_count, row=7)
    mpstate.console.set_status('ThumbSize', 'ThumbSize %.0f' % 0.0, row=7)
    mpstate.console.set_status('ImageSize', 'ImageSize %.0f' % 0.0, row=7)

    ack_time = time.time()

    while not state.unload.wait(0.02):
        if state.viewing:
            tnow = time.time()
            if tnow - ack_time > 0.1:
                bsend.tick(packet_count=1000,
                           max_queue=state.settings.maxqueue1)
                state.bsend2.tick(packet_count=1000,
                                  max_queue=state.settings.maxqueue2)
                if state.bsend_slave is not None:
                    state.bsend_slave.tick(packet_count=1000)
                ack_time = tnow
            if not view_window:
                view_window = True
                mosaic = cuav_mosaic.Mosaic(slipmap=mpstate.map,
                                            C=state.c_params)
                if state.boundary_polygon is not None:
                    mosaic.set_boundary(state.boundary_polygon)
                if mpstate.continue_mode:
                    reload_mosaic(mosaic)

            # check for keyboard events
            mosaic.check_events()

            buf = bsend.recv(0)
            if buf is None:
                buf = state.bsend2.recv(0)
            if buf is None:
                continue
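            # peers send pickled Python objects; copy the buffer into a str before unpickling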
            try:
                obj = cPickle.loads(str(buf))
                if obj is None:
                    continue
            except Exception:
                continue

            if state.settings.gcs_slave is not None:
                if state.bsend_slave is None:
                    state.bsend_slave = block_xmit.BlockSender(
                        0,
                        bandwidth=state.settings.bandwidth * 10,
                        debug=False)
                #print("send bsend_slave")
                state.bsend_slave.send(buf,
                                       dest=(state.settings.gcs_slave,
                                             state.settings.gcs_view_port),
                                       priority=1)

            if isinstance(obj, ThumbPacket):
                # we've received a set of thumbnails from the plane for a positive hit
                if obj.frame_time in thumbs_received:
                    continue
                thumbs_received.add(obj.frame_time)

                thumb_total_bytes += len(buf)

                # save the thumbnails
                thumb_filename = '%s/v%s.jpg' % (
                    thumb_dir, cuav_util.frame_time(obj.frame_time))
                chameleon.save_file(thumb_filename, obj.thumb)
                composite = cv.LoadImage(thumb_filename)
                thumbs = cuav_mosaic.ExtractThumbs(composite, len(obj.regions))

                # log the joe positions
                filename = '%s/v%s.jpg' % (
                    view_dir, cuav_util.frame_time(obj.frame_time))
                pos = obj.pos
                log_joe_position(pos, obj.frame_time, obj.regions, filename,
                                 thumb_filename)

                # update the mosaic and map
                mosaic.set_brightness(state.settings.brightness)
                mosaic.add_regions(obj.regions, thumbs, filename, pos=pos)

                # update console display
                region_count += len(obj.regions)
                state.frame_loss = obj.frame_loss
                state.xmit_queue = obj.xmit_queue
                thumb_count += 1

                mpstate.console.set_status('Lost',
                                           'Lost %u' % state.frame_loss)
                mpstate.console.set_status('Regions',
                                           'Regions %u' % region_count)
                mpstate.console.set_status('XMITQ',
                                           'XMITQ %.0f' % state.xmit_queue)
                mpstate.console.set_status('Thumbs', 'Thumbs %u' % thumb_count)
                mpstate.console.set_status(
                    'ThumbSize',
                    'ThumbSize %.0f' % (thumb_total_bytes / thumb_count))

            if isinstance(obj, ImagePacket):
                # we have an image from the plane
                image_total_bytes += len(buf)

                state.xmit_queue = obj.xmit_queue
                mpstate.console.set_status('XMITQ',
                                           'XMITQ %.0f' % state.xmit_queue)

                # save it to disk
                filename = '%s/v%s.jpg' % (
                    view_dir, cuav_util.frame_time(obj.frame_time))
                chameleon.save_file(filename, obj.jpeg)
                img = cv.LoadImage(filename)
                if img.width == 1280:
                    display_img = cv.CreateImage((640, 480), 8, 3)
                    cv.Resize(img, display_img)
                else:
                    display_img = img

                mosaic.add_image(obj.frame_time, filename, obj.pos)

                cv.ConvertScale(display_img,
                                display_img,
                                scale=state.settings.brightness)
                img_window.set_image(display_img, bgr=True)

                # update console
                image_count += 1
                jpeg_total_bytes += len(obj.jpeg)
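                # exponentially weighted moving average of the JPEG size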
                state.jpeg_size = 0.95 * state.jpeg_size + 0.05 * len(obj.jpeg)
                mpstate.console.set_status('Images', 'Images %u' % image_count)
                mpstate.console.set_status(
                    'JPGSize',
                    'JPG Size %.0f' % (jpeg_total_bytes / image_count))
                mpstate.console.set_status(
                    'ImageSize',
                    'ImageSize %.0f' % (image_total_bytes / image_count))

            if isinstance(obj, CommandResponse):
                print('REMOTE: %s' % obj.response)

        else:
            if view_window:
                view_window = False
Example #45
 def decode(self, session_data):
     """decodes the data to get back the session dict """
     pickled = base64.decodestring(session_data)
     return pickle.loads(pickled)
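
The write side is just the mirror image; a minimal sketch of a matching encoder, assuming the surrounding session class stores whatever it returns (the name `encode` is a guess, not taken from the original):

 def encode(self, session_dict):
     """pickles the session dict and base64-encodes it for storage"""
     return base64.encodestring(pickle.dumps(session_dict))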
Example #46
        self.amount_cents = amount_cents
        self.routing_number = routing_number
        self.account_number = account_number
        self.customer_name = customer_name
        self.reference_number = reference_number

    @property
    def amount(self):
        return self.amount_cents

    @property
    def date(self):
        return date.today()


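# CLI protocol: each stage prints a pickled object which the next stage reads back
# from sys.argv (Python 2 pickle.dumps defaults to protocol 0, a plain ASCII str)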
if __name__ == '__main__':
    if sys.argv[1] == '--config':
        configuration = json.loads(sys.argv[2])
        print(pickle.dumps(CPAFile(**configuration)))
    elif sys.argv[1] == '--transactions':
        cpa_obj = pickle.loads(sys.argv[2])
        transactions_obj = json.loads(sys.argv[3])
        transactions = []
        for transaction in transactions_obj:
            transactions.append(Transaction(
                transaction['transaction_type'],
                transaction['amount'],
                transaction['routing_number'],
                transaction['account_number'],
                transaction['customer_name'],
                transaction['reference_number']))
        cpa_obj.set_transcations(transactions)
        print(pickle.dumps(cpa_obj))
    elif sys.argv[1] == '--generate':
        cpa_obj = pickle.loads(sys.argv[2])
        print(cpa_obj.generate_file())
Example #47
def import_object(obj=None):
    if obj is None:
        obj = sys.stdin.read()
    return cPickle.loads(gzip.zlib.decompress(obj.strip().decode("base64")))
Example #48
tfile = open("trainer_ideal_front2.txt","a")
histo = open("histogram_ideal.txt","a")
count = 1
#Finding all the contours
while True:
    top_left = (0,0)
    bottom_right = (0,0)
    ret,im = cap.read()
    img = cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
    (thresh,imgg)=cv2.threshold(img,10,255,cv2.THRESH_BINARY | cv2.THRESH_OTSU)

    ret,thresh = cv2.threshold(imgg,214,255,0)
    contours,hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
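    # dumps/loads round trip: a cheap deep copy of the contour list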
    tmp = cPickle.dumps(contours)
    contours = cPickle.loads(tmp)
#Finding the second-largest contour (areas[1] after the descending sort)
    areas=[]
    areatmp=[]
    
    for c in contours:
        area = cv2.contourArea(c, False)
        areas.append(area)
        areatmp.append(area)
    areas.sort()
    areas.reverse()
    try:
        ind = areatmp.index(areas[1])
        ctrs = contours[ind]
    except IndexError:
        pass
Example #49
def load(name):
    with open(name, "rb") as compressed:
        data = cPickle.loads(zlib.decompress(compressed.read()))
    return data
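
The writer is the obvious inverse; a minimal sketch, assuming the same zlib-over-cPickle file layout (the name `save` does not appear in the original):

def save(name, data):
    with open(name, "wb") as compressed:
        compressed.write(zlib.compress(cPickle.dumps(data, cPickle.HIGHEST_PROTOCOL)))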
Example #50
def tiles(request):
    '''Retrieve a set of tiles

    A call to this API function should retrieve a few tiles.

    Args:
        request (django.http.HTTPRequest): The request object containing
            the parameters (e.g. d=x.0.0) that identify the tiles being
            requested.

    Returns:
        django.http.JsonResponse: A JSON object containing all of the tile
            data being requested. The JSON object is just a dictionary of
            (tile_id, tile_data) items.

    '''
    # create a set so that we don't fetch the same tile multiple times
    tileids_to_fetch = set(request.GET.getlist("d"))
    # with ProcessPoolExecutor() as executor:
    #       res = executor.map(parallelize, hargs)
    '''
    p = mp.Pool(4)
    res = p.map(parallelize, hargs)
    '''

    # Return the raw data if only one tile is requested. This currently only
    # works for `imtiles`
    raw = request.GET.get('raw', False)

    tileids_by_tileset = col.defaultdict(set)
    generated_tiles = []

    tilesets = {}
    transform_id_to_original_id = {}

    # sort tile_ids by the dataset they come from
    for tile_id in tileids_to_fetch:
        tileset_uuid = tgt.extract_tileset_uid(tile_id)

        # get the tileset object first
        if tileset_uuid in tilesets:
            tileset = tilesets[tileset_uuid]
        else:
            tileset = tm.Tileset.objects.get(uuid=tileset_uuid)
            tilesets[tileset_uuid] = tileset

        if tileset.filetype == 'cooler':
            # cooler tiles can have a transform (e.g. 'ice', 'kr') which
            # needs to be added if it's not there (e.g. 'default')
            new_tile_id = add_transform_type(tile_id)
            transform_id_to_original_id[new_tile_id] = tile_id
            tile_id = new_tile_id
        else:
            transform_id_to_original_id[tile_id] = tile_id

        # see if the tile is cached
        tile_value = None
        try:
            tile_value = rdb.get(tile_id)
        except Exception as ex:
            # there was an error accessing the cache server
            # log the error and carry forward fetching the tile
            # from the original data
            logger.warn(ex)

        #tile_value = None

        if tile_value is not None:
            # we found the tile in the cache, no need to fetch it again
            tile_value = pickle.loads(tile_value)
            generated_tiles += [(tile_id, tile_value)]
            continue

        tileids_by_tileset[tileset_uuid].add(tile_id)

    # fetch the tiles
    tilesets = [tilesets[tu] for tu in tileids_by_tileset]
    accessible_tilesets = [(t, tileids_by_tileset[t.uuid], raw)
                           for t in tilesets
                           if (not t.private) or request.user == t.owner]

    #pool = mp.Pool(6)

    generated_tiles += list(it.chain(*map(tgt.generate_tiles, accessible_tilesets)))

    '''
    for tileset_uuid in tileids_by_tileset:
        # load the tileset object
        tileset = tilesets[tileset_uuid]

        # check permissions
        if tileset.private and request.user != tileset.owner:
            generated_tiles += [(tile_id, {'error': "Forbidden"}) for tile_id in tileids_by_tileset[tileset_uuid]]
        else:
            generated_tiles += generate_tiles(tileset, tileids_by_tileset[tileset_uuid])
    '''

    # store the tiles in redis

    tiles_to_return = {}

    for (tile_id, tile_value) in generated_tiles:
        try:
            rdb.set(tile_id, pickle.dumps(tile_value))
        except Exception as ex:
            # error caching a tile
            # log the error and carry forward, this isn't critical
            logger.warn(ex)

        if tile_id in transform_id_to_original_id:
            original_tile_id = transform_id_to_original_id[tile_id]
        else:
            # not in our list of reformatted tile ids, so it probably
            # wasn't requested
            continue

        if original_tile_id in tileids_to_fetch:
            tiles_to_return[original_tile_id] = tile_value

    if len(generated_tiles) == 1 and raw and 'image' in generated_tiles[0][1]:
        return HttpResponse(
            generated_tiles[0][1]['image'], content_type='image/jpeg'
        )

    return JsonResponse(tiles_to_return, safe=False)
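
For reference, a minimal client-side sketch of calling this view; the /api/v1/tiles/ mount point and the use of requests are assumptions, not taken from the code above:

import requests  # hypothetical client, separate from the server code

def fetch_tiles(base_url, tile_ids):
    # the view reads repeated d= parameters and returns a {tile_id: tile_data} JSON object
    params = [("d", tid) for tid in tile_ids]
    return requests.get(base_url + "/api/v1/tiles/", params=params).json()  # assumed mount point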
Example #51
 def on_response(response):
     if callback:
         result = pickle.loads(response.body)
         callback(result)
Example #52
import numpy as np
import cv2
import glob
import cPickle
f = open('3d data.txt', 'rb')
data = f.read()
index = cPickle.loads(data)
f.close()
query = cv2.imread('got1.jpg', 1)
hist = cv2.calcHist([query], [0, 1, 2], None, [8, 8, 8],
                    [0, 256, 0, 256, 0, 256])
hist = cv2.normalize(hist, hist, 0, 256, cv2.NORM_MINMAX)
hist = hist.flatten()
eps = 1e-10
result = {}
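# chi-squared distance between the query histogram and every indexed histogram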
for (k, features) in index.items():
    d = 0.5 * np.sum([((a - b)**2 / (a + b + eps))
                      for (a, b) in zip(features, hist)])
    result[k] = d
result = sorted([(v, k) for (k, v) in result.items()])
a = result[0][1]
img = glob.glob(a)
image = img[0]
img = cv2.imread(image, 1)
cv2.imshow(a, img)
cv2.waitKey(0)
cv2.destroyAllWindows()
Example #53
 def avatarListResponse(self, pickleData):
     avatars = loads(pickleData)
     messenger.send('avatarList', [avatars])
Example #54
def convert_data(data):
    return cPickle.loads(str(data))
Example #55
 def load(self, k):
     return cPickle.loads(zlib.decompress(self.db[k][:][0].tostring()))
Example #56
def unpickle(pickled):
    try:
        return _pickle.loads(pickled)
    except TypeError:
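        # loads() raises TypeError for buffer-like input (e.g. a DB driver buffer); retry as str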
        return _pickle.loads(str(pickled))
Example #57
	def dataReceived(self, data): #receive other gamespace from server
		#print "Received data"
		self.gs.enemyspace.score = pickle.loads(data)
		self.sendData()
Example #58
 def __unpackBonusByQuestID(self):
     if self.bonusByQuestID:
         self._bonusByQuestID = cPickle.loads(
             zlib.decompress(self.bonusByQuestID))
Example #59
 def run(self, pipelines):
     with pipeline.InOrder():
         pipelines = cPickle.loads(str(pipelines))
         for pipe in pipelines:
             yield pipe
Example #60
def serviceengine_metrics_multiprocess(r, uuid_list, se_metric_list,
                                       tenant_list, runtime):
    try:
        discovered_se = []
        metric_resp = []
        print(
            str(datetime.now()) + ' =====> Refreshing SE Static Metrics Cache')
        se_static_metric_cache_start = time.time()
        se_dict = pickle.loads(r.get('se_dict'))
        proc = []
        for t in tenant_list:
            p = Process(target=serviceengine_metrics_child,
                        args=(
                            r,
                            uuid_list,
                            se_metric_list,
                            se_dict,
                            t,
                        ))
            p.start()
            proc.append(p)
            if len(proc) > 10:
                for p in proc:
                    p.join()
                proc = []
        for p in proc:
            p.join()
        metric_keys = r.keys('temp_se_stat_*')
        for k in metric_keys:
            _1 = pickle.loads(r.get(k))
            metric_resp.append(_1['series']['collItemRequest:AllSEs'])
            r.delete(k)
        #prom_metrics = ''
        prom_metrics = ['\n']
        se_metrics_runtime = pickle.loads(r.get('se_metrics_runtime'))
        for _resp in metric_resp:
            for s in _resp:
                if s in se_dict:
                    if s not in discovered_se:
                        discovered_se.append(s)
                        for m in _resp[s]:
                            if 'data' in m:
                                temp_tags = ''
                                metric_name = m['header']['name'].replace(
                                    '.', '_').replace('-', '_')
                                metric_description = m['header'][
                                    'metric_description']
                                metric_value = m['data'][0]['value']
                                temp_payload = {}
                                temp_payload['name'] = se_dict[s]['name']
                                temp_payload['uuid'] = s
                                temp_payload['cloud'] = se_dict[s]['cloud']
                                temp_payload['se_group'] = se_dict[s][
                                    'se_group']
                                temp_payload['tenant'] = m['header'][
                                    'tenant_ref'].rsplit('#')[1]
                                temp_payload['entity_type'] = 'serviceengine'
                                for e in temp_payload:
                                    temp_tags = temp_tags + (
                                        str(e + '="' + temp_payload[e] + '",'))
                                temp_tags = '{' + temp_tags.rstrip(',') + '}'
                                #prom_metrics = prom_metrics+'\n'+'# HELP '+metric_name+' '+metric_description
                                #prom_metrics = prom_metrics+'\n'+'# TYPE '+metric_name+' gauge'
                                #prom_metrics = prom_metrics+'\n'+metric_name+''+temp_tags+' '+str(metric_value)
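                                # the '01' / '02' prefixes make the HELP and TYPE lines
                                # sort ahead of the sample line; they are stripped again
                                # after the sort near the end of this function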
                                prom_metrics.append('%s 01# HELP %s %s' %
                                                    (metric_name, metric_name,
                                                     metric_description))
                                prom_metrics.append('%s 02# TYPE %s gauge' %
                                                    (metric_name, metric_name))
                                prom_metrics.append('%s %s %s' %
                                                    (metric_name, temp_tags,
                                                     str(metric_value)))
                        if 'runtime' in se_dict[s]:
                            for m in se_dict[s]['runtime']:
                                temp_payload = {}
                                temp_payload['name'] = se_dict[s]['name']
                                temp_payload['uuid'] = s
                                temp_payload['cloud'] = se_dict[s]['cloud']
                                temp_payload['se_group'] = se_dict[s][
                                    'se_group']
                                temp_payload['tenant'] = se_dict[s]['tenant']
                                temp_payload['entity_type'] = 'serviceengine'
                                se_metrics_runtime.append(m)
                                temp_tags = ''
                                if type(se_dict[s]['runtime'][m]) != int:
                                    temp_payload[m] = str(
                                        se_dict[s]['runtime'][m])
                                    int_value = False
                                else:
                                    int_value = True
                                for e in temp_payload:
                                    temp_tags = temp_tags + (
                                        str(e + '="' + temp_payload[e] + '",'))
                                temp_tags = '{' + temp_tags.rstrip(',') + '}'
                                prom_metrics.append('%s 01# HELP %s' % (m, m))
                                prom_metrics.append('%s 02# TYPE %s gauge' %
                                                    (m, m))
                                if not int_value:
                                    prom_metrics.append('%s %s %s' %
                                                        (m, temp_tags, str(1)))
                                else:
                                    prom_metrics.append(
                                        '%s %s %s' %
                                        (m, temp_tags,
                                         str(se_dict[s]['runtime'][m])))
                            ##----- return vscount for SE
                            #metric_name = 'vscount'
                            #metric_value = se_dict[s]['vscount']
                            #temp_payload = {}
                            #temp_payload['name'] = se_dict[s]['name']
                            #temp_payload['uuid'] = s
                            #temp_payload['cloud'] = se_dict[s]['cloud']
                            #temp_payload['se_group'] = se_dict[s]['se_group']
                            #temp_payload['tenant'] = se_dict[s]['tenant']
                            #temp_payload['entity_type'] = 'serviceengine'
                            #temp_tags = ''
                            #for e in temp_payload:
                            #    temp_tags=temp_tags+(str(e+'="'+temp_payload[e]+'",'))
                            #temp_tags = '{'+temp_tags.rstrip(',')+'}'
                            #prom_metrics.append('%s 01# HELP %s' %(m,m))
                            #prom_metrics.append('%s 02# TYPE %s gauge' %(m,m))
                            #prom_metrics.append('%s %s %s' %(metric_name,temp_tags,str(metric_value)))
        se_metrics_runtime = list(set(se_metrics_runtime))
        r.set('se_metrics_runtime', pickle.dumps(se_metrics_runtime))
        #prom_metrics = prom_metrics+'\n'
        #se_metrics = prom_metrics
        prom_metrics = list(set(prom_metrics))
        prom_metrics = sorted(prom_metrics)
        for idx, item in enumerate(prom_metrics):
            if '01#' in item:
                item = item.split('01', 1)[1]
                prom_metrics[idx] = item
            elif '02#' in item:
                item = item.split('02', 1)[1]
                prom_metrics[idx] = item
        prom_metrics.append('\n')
        _se_metrics = '\n'.join(prom_metrics)
        r.set('se_polling', 'False')
        missing_metrics = []
        for _s in se_dict:
            if se_dict[_s]['name'] not in _se_metrics:
                missing_metrics.append(_s)
        r.set('se_missing_metrics', pickle.dumps(missing_metrics))
        r.set('se_metrics', pickle.dumps(prom_metrics))
        temp_total_time = str(time.time() - se_static_metric_cache_start)
        print(
            str(datetime.now()) +
            ' =====> Refresh of SE Metrics Cache took %s seconds' %
            temp_total_time)
    except Exception:
        r.set('se_polling', 'False')
        print(
            str(datetime.now()) +
            ' : func serviceengine_metrics encountered an error')
        exception_text = traceback.format_exc()
        print(str(datetime.now()) + ' : ' + exception_text)