Example #1
    def test_pickling(self):

        fsm = Fysom({
            'initial': 'green',
            'events': [
                {'name': 'warn', 'src': 'green', 'dst': 'yellow'},
                {'name': 'panic', 'src': 'yellow', 'dst': 'red'},
                {'name': 'calm', 'src': 'red', 'dst': 'yellow'},
                {'name': 'clear', 'src': 'yellow', 'dst': 'green'}
            ]
        })

        pickled = dumps(fsm)
        assert pickled
        fsm = loads(pickled)

        self.assertTrue(isinstance(fsm, Fysom))
        self.assertEquals('green', fsm.current)

        fsm.warn()
        pickled = dumps(fsm)
        assert pickled
        fsm = loads(pickled)

        self.assertEquals('yellow', fsm.current)
Example #2
    def test_versionUpgrade(self):
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            persistenceForgets = ['garbagedata']
            v3 = 0
            v4 = 0

            def __init__(self):
                self.somedata = 'xxx'
                self.garbagedata = lambda q: 'cant persist'

            def upgradeToVersion3(self):
                self.v3 += 1

            def upgradeToVersion4(self):
                self.v4 += 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = pickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        pickl = pickle.dumps(obj)
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"
Example #3
 def edit_convergence_datas(self, session, gid, papi, package_id, datas):
     datas['cmdPhases'] = cPickle.dumps(datas['cmdPhases'])
     return session.query(Convergence).filter_by(
         parentGroupId = gid,
         papi = cPickle.dumps(papi),
         packageUUID = package_id
     ).update(datas)
Example #4
def logProtectedCall(result, path, args, user, allowed=True):
    """This should be called when a protected call was attempted,
       successful or not. It logs the attempt and its results in the
       audit_trail database. This audit trail can be used for several things:
       listing recently updated metadata (perhaps for a "what's new?" page)
       or detecting and recovering from malicious use of keys.
       """
    # Store the first argument separately so we can relatively efficiently search for it
    if args:
        main_param = str(args[0])
    else:
        main_param = None

    # Get the user's UID. If it hasn't even been looked up successfully,
    # this is just a failed operation on a nonexistent user and it's not worth logging.
    uid = user.getCachedUid()
    if uid is None:
        return

    Database.pool.runOperation(
        "INSERT INTO audit_trail (timestamp, uid, action_domain, action_name,"
        " main_param, params, allowed, results)"
        " VALUES(%d, %d, 'protected_call', %s, %s, '%s', %d, '%s')" % (
        time.time(),
        uid,
        Database.quote(".".join(path), 'text'),
        Database.quote(main_param, 'text'),
        Database.quoteBlob(cPickle.dumps(args)),
        allowed,
        Database.quoteBlob(cPickle.dumps(result))))
    return result
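
The INSERT above goes through the project's Database.pool and quote/quoteBlob helpers with manual string formatting. A minimal sketch of the same audit-row write using stdlib sqlite3 parameter binding (the table layout here is illustrative, not the project's actual schema):

import pickle
import sqlite3
import time

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE audit_trail (timestamp REAL, uid INTEGER, action_name TEXT,"
             " main_param TEXT, params BLOB, allowed INTEGER, results BLOB)")

def log_protected_call(result, path, args, uid, allowed=True):
    # Parameter binding replaces the manual quote()/quoteBlob() escaping used above.
    conn.execute(
        "INSERT INTO audit_trail VALUES (?, ?, ?, ?, ?, ?, ?)",
        (time.time(), uid, ".".join(path), str(args[0]) if args else None,
         sqlite3.Binary(pickle.dumps(args)), int(allowed),
         sqlite3.Binary(pickle.dumps(result))))
    return result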
Example #5
 def index_all_links(self):
     for key,value in self.link_index_dict.items():
         value = cPickle.dumps(value)
         self.client.hset(INVERT_TABLE_LINK,key,value)
     for key,value in self.title_index_dict.items():
         value = cPickle.dumps(value)
         self.client.hset(INVERT_TABLE_TITLE,key,value)
Example #6
    def __setitem__(self, key, value, index=None):
        """Add a new file in the cache. 'value' is expected to contains the
        path of file.

        """
        _i = self._open_index() if index is None else index

        key = pickle.dumps(key, protocol=-1)

        original_value = value

        md5 = hashlib.md5(open(value, 'rb').read()).hexdigest()
        filename = os.path.basename(value)
        value = (md5, filename)
        value = pickle.dumps(value, protocol=-1)
        _i[key] = value

        # Move the file into the container using a hard link
        cache_fn = os.path.join(self.cachedir, md5[:2], md5[2:])
        if os.path.exists(cache_fn):
            # Manage collisions using hard links and refcount
            collisions = sorted(glob.glob(cache_fn + '-*'))
            next_refcount = 1
            if collisions:
                next_refcount = int(collisions[-1][-3:]) + 1
            next_cache_fn = cache_fn + '-%03d' % next_refcount
            os.link(cache_fn, next_cache_fn)
        else:
            dirname = os.path.dirname(cache_fn)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            os.link(original_value, cache_fn)

        if index is None:
            self._close_index(_i)
Example #7
    def save(self):
        buffer = ""

        # Save class watermarks
        if self.__wms["SSS_CLASS"] != None:
            sss = self.__wms["SSS_CLASS"]

            buffer += '<class name="%s">\n' % self.__class_name
            buffer += "<threshold>%d</threshold>\n" % (sss.get_threshold())
            buffer += "<sss>%s</sss>\n" % (base64.b64encode(cPickle.dumps(sss.get_y())))

            for j in self.__wms["CLASS"]:
                buffer += '<wm type="%s">%s</wm>\n' % (j[0], base64.b64encode(cPickle.dumps(j[1].get_export_context())))

            buffer += "</class>\n"

        # Save methods watermarks
        for i in self.__wms["SSS_METHODS"]:
            sss = self.__wms["SSS_METHODS"][i]
            buffer += '<method class="%s" name="%s" descriptor="%s">\n' % (
                i.get_class_name(),
                escape(i.get_name()),
                i.get_descriptor(),
            )
            buffer += "<threshold>%d</threshold>\n" % (sss.get_threshold())
            buffer += "<sss>%s</sss>\n" % (base64.b64encode(cPickle.dumps(sss.get_y())))

            for j in self.__wms["METHODS"][i]:
                buffer += '<wm type="%s">%s</wm>\n' % (j[0], base64.b64encode(cPickle.dumps(j[1].get_export_context())))

            buffer += "</method>\n"

        return buffer
Example #8
  def testNupicRandomPickling(self):
    """Test pickling / unpickling of NuPIC randomness."""

    # Simple test: make sure that dumping / loading works...
    r = Random(42)
    pickledR = pickle.dumps(r)

    test1 = [r.getUInt32() for _ in xrange(10)]
    r = pickle.loads(pickledR)
    test2 = [r.getUInt32() for _ in xrange(10)]

    self.assertEqual(test1, test2,
                     "Simple NuPIC random pickle/unpickle failed.")

    # A little trickier: dump / load _after_ some numbers have been generated
    # (in the first test).  Things should still work...
    # ...the idea of this test is to make sure that the pickle code isn't just
    # saving the initial seed...
    pickledR = pickle.dumps(r)

    test3 = [r.getUInt32() for _ in xrange(10)]
    r = pickle.loads(pickledR)
    test4 = [r.getUInt32() for _ in xrange(10)]

    self.assertEqual(
        test3, test4,
        "NuPIC random pickle/unpickle didn't work for saving later state.")

    self.assertNotEqual(test1, test3,
                        "NuPIC random gave the same result twice?!?")
Example #9
 def record(self, case):
     """Record the given Case."""
     cur = self._connection.cursor()
     
     cur.execute("""insert into cases(id,uuid,parent,label,msg,retries,model_id,timeEnter) 
                        values (?,?,?,?,?,?,?,DATETIME('NOW'))""", 
                                  (None, case.uuid, case.parent_uuid, case.label,
                                   case.msg or '', case.retries, 
                                   self.model_id))
     case_id = cur.lastrowid
     # insert the inputs and outputs into the vars table.  Pickle them if they're not one of the
     # built-in types int, float, or str.
     
     for name,value in case.items(iotype='in'):
         if isinstance(value, (float,int,str)):
             v = (None, name, case_id, 'i', value)
         else:
             v = (None, name, case_id, 'i', sqlite3.Binary(dumps(value,HIGHEST_PROTOCOL)))
         cur.execute("insert into casevars(var_id,name,case_id,sense,value) values(?,?,?,?,?)", 
                     v)
     for name,value in case.items(iotype='out'):
         if isinstance(value, (float,int,str)):
             v = (None, name, case_id, 'o', value)
         else:
             v = (None, name, case_id, 'o', sqlite3.Binary(dumps(value,HIGHEST_PROTOCOL)))
         cur.execute("insert into casevars(var_id,name,case_id,sense,value) values(?,?,?,?,?)", 
                     v)
     self._connection.commit()
Example #10
 def get_prep_value(self, value):
     if isinstance(value, list):
         return cPickle.dumps(value)
     elif isinstance(value, np.ndarray):
         return cPickle.dumps(value.tolist())
     else:
         raise TypeError('%s is not a list or numpy array' % value)
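
The field's to_python counterpart is not among these examples; assuming stored values come back as plain pickle strings, it would presumably be roughly the inverse:

 def to_python(self, value):
     # Hypothetical inverse of get_prep_value: pass through live objects, unpickle stored strings.
     if isinstance(value, (list, np.ndarray)):
         return value
     return cPickle.loads(str(value))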
Example #11
def test_joblib_exception():
    # Smoke-test the custom exception
    e = JoblibException('foobar')
    # Test the repr
    repr(e)
    # Test the pickle
    pickle.dumps(e)
Example #12
def pack_py_payload(conf):
    print colorize("[+] ","green")+"generating payload ..."
    fullpayload=[]

    with open(os.path.join(ROOT,"packages","all", "pupyimporter.py")) as f:
        pupyimportercode=f.read()
    fullpayload.append(get_load_module_code(pupyimportercode,"pupyimporter")+"\n")

    modules_dic=gen_package_pickled_dic(rpyc.__path__[0],"rpyc")
    fullpayload.append("import pupyimporter\npupyimporter.install()\npupyimporter.pupy_add_package(%s)\nimport rpyc"%repr(cPickle.dumps(modules_dic)))

    modules_dic=gen_package_pickled_dic(os.path.join(ROOT,"network"),"network")
    fullpayload.append("pupyimporter.pupy_add_package(%s)"%repr(cPickle.dumps(modules_dic)))

    modules_dic=gen_package_pickled_dic(pyasn1.__path__[0],"pyasn1")
    fullpayload.append("pupyimporter.pupy_add_package(%s)"%repr(cPickle.dumps(modules_dic)))

    modules_dic=gen_package_pickled_dic(rsa.__path__[0],"rsa")
    fullpayload.append("pupyimporter.pupy_add_package(%s)"%repr(cPickle.dumps(modules_dic)))

    with open(os.path.join(ROOT,"pp.py")) as f:
        code=f.read()
    code=re.sub(r"LAUNCHER=.*\nLAUNCHER_ARGS=.*", conf, code)
    fullpayload.append(code+"\n")
    
    return compress_encode_obfs('\n'.join(fullpayload)+"\n")
Example #13
 def validate_reconstruction_results(self, expected, actual):
     expected = self.data.get_node(expected)
     actual = self.data.get_node(actual)
     self.validate_column_data(expected, actual)
     self.assertIn('cluster', actual.attrs)
     self.assertEqual(pickle.dumps(expected.attrs.cluster),
                      pickle.dumps(actual.attrs.cluster))
Example #14
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname),
                             _byte_string('?'*len(tzname)))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)

        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42

        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)

        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
Example #15
 def test_copying(self):
     # Check that counters are copyable, deepcopyable, picklable, and
     #have a repr/eval round-trip
     words = Counter('which witch had which witches wrist watch'.split())
     update_test = Counter()
     update_test.update(words)
     for i, dup in enumerate([
                 words.copy(),
                 copy.copy(words),
                 copy.deepcopy(words),
                 pickle.loads(pickle.dumps(words, 0)),
                 pickle.loads(pickle.dumps(words, 1)),
                 pickle.loads(pickle.dumps(words, 2)),
                 pickle.loads(pickle.dumps(words, -1)),
                 cPickle.loads(cPickle.dumps(words, 0)),
                 cPickle.loads(cPickle.dumps(words, 1)),
                 cPickle.loads(cPickle.dumps(words, 2)),
                 cPickle.loads(cPickle.dumps(words, -1)),
                 eval(repr(words)),
                 update_test,
                 Counter(words),
                 ]):
         msg = (i, dup, words)
         self.assertTrue(dup is not words)
         self.assertEquals(dup, words)
         self.assertEquals(len(dup), len(words))
         self.assertEquals(type(dup), type(words))
Example #16
 def test_pickling(self):
     """intbitset - pickling"""
     import cPickle
     for set1 in self.sets + [[]]:
         self.assertEqual(intbitset(set1), cPickle.loads(cPickle.dumps(intbitset(set1), -1)))
     for set1 in self.sets + [[]]:
         self.assertEqual(intbitset(set1, trailing_bits=True), cPickle.loads(cPickle.dumps(intbitset(set1, trailing_bits=True), -1)))
Example #17
	def store(self, id, pickle):
		'''
		Take a data object and try to pickle it in our jar
		1) attempt to pickle
		2) collect meta about the pickle
		3) derive a mapping
		4) store the pickle in the jar
		5) update our jar map
		'''
		try: 
			cPickle.dumps(pickle)
		except cPickle.PicklingError: 
			raise ValueError('NOTAPICKLE', 'Your data cannot be pickled')
		 
		meta = { 
			'id': id,
			'dir': dir(pickle),
			'module': pickle.__module__,
			'type': type(pickle).__name__,
			'size': sys.getsizeof(pickle)
		} 
		
		pid = hashlib.sha256(id).hexdigest()
		location = self.__writePoint + pid + '.pickle'
		if os.path.isfile(location) and not self.__jarContents['overwrite']:
			raise ValueError('OVERWRITE', 'Your jar is not set to overwrite pickles. Set this option and retry or pick a different ID')
			
		with open(location, "wb") as pickled:
			cPickle.dump(pickle, pickled)
		pickled.close()
		
		self.__jarContents['mappings'][pid] = meta
		self.__save()
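
A stripped-down sketch of the same five steps, minus the metadata map and overwrite guard (jar_dir stands in for the instance's write point; Python 2 style to match the original):

import cPickle
import hashlib
import os

def store(jar_dir, id, obj):
    try:
        cPickle.dumps(obj)                      # 1) make sure the object pickles at all
    except cPickle.PicklingError:
        raise ValueError('NOTAPICKLE', 'Your data cannot be pickled')
    pid = hashlib.sha256(id).hexdigest()        # 3) derive a file name from the id
    location = os.path.join(jar_dir, pid + '.pickle')
    with open(location, 'wb') as fh:            # 4) store the pickle in the jar
        cPickle.dump(obj, fh)
    return pid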
Example #18
	def xmlrpc_startMonitor(self, msg):
		msg = pickle.loads(msg)
		if self._id == None or msg.id != self._id:
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		if msg.type != _MsgType.StartMonitor:
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		print "Agent: startMonitor(%s)" % msg.monitorName
		
		try:
			code = msg.monitorClass + "(msg.params)"
			print "code:", code
			monitor = eval(code)
			
			if monitor == None:
				print "Agent: Unable to create Monitor [%s]" % msg.monitorClass
				return pickle.dumps(_Msg(self._id, _MsgType.Nack, "Unable to create Monitor [%s]" % msg.monitorClass))
			
			monitor.monitorName = msg.monitorName
			self._monitors.append(monitor)
			
			print "Agent: Sending Ack"
			return pickle.dumps(_Msg(None, _MsgType.Ack))
			
		except:
			print "Agent: Unable to create Monitor [%s], exception occured." % msg.monitorClass
			raise
			return pickle.dumps(_Msg(None, _MsgType.Nack, "Unable to create Monitor [%s], exception occured." % msg.monitorClass))
Example #19
    def test_pickle(self):
        from numpy import dtype, zeros
        import sys
        try:
            from numpy.core.multiarray import scalar
        except ImportError:
            # running on dummy module
            from numpy import scalar
        from cPickle import loads, dumps
        i = dtype('int32').type(1337)
        f = dtype('float64').type(13.37)
        c = dtype('complex128').type(13 + 37.j)

        swap = lambda s: (''.join(reversed(s))) if sys.byteorder == 'big' else s
        assert i.__reduce__() == (scalar, (dtype('int32'), swap('9\x05\x00\x00')))
        assert f.__reduce__() == (scalar, (dtype('float64'), swap('=\n\xd7\xa3p\xbd*@')))
        assert c.__reduce__() == (scalar, (dtype('complex128'), swap('\x00\x00\x00\x00\x00\x00*@') + \
                                                                swap('\x00\x00\x00\x00\x00\x80B@')))

        assert loads(dumps(i)) == i
        assert loads(dumps(f)) == f
        assert loads(dumps(c)) == c

        a = zeros(3)
        assert loads(dumps(a.sum())) == a.sum()
Example #20
	def xmlrpc_clientHello(self, msg):
		msg = pickle.loads(msg)
		if msg.password != self._password:
			print "Agent: Incorrect password on clientHello [%s]" % msg.password
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		if msg.type != _MsgType.ClientHello:
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		print "Agent: clientHello()"
		
		if self._id != None:
			self._stopAllMonitors()
		
		self._id = str(uuid.uuid1())
		print "Agent: Session ID: ", self._id
		
		# Handle any PythonPath or Imports
		if msg.pythonPaths != None:
			for p in msg.pythonPaths:
				sys.path.append(p['name'])
		
		if msg.imports != None:
			for i in msg.imports:
				self._handleImport(i)
		
		print "Agent: clientHello() all done"
		
		return pickle.dumps(_Msg(self._id, _MsgType.AgentHello))
Example #21
	def xmlrpc_getMonitorData(self, msg):
		msg = pickle.loads(msg)
		if self._id == None or msg.id != self._id:
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		if msg.type != _MsgType.GetMonitorData:
			return pickle.dumps(_Msg(None, _MsgType.Nack))
		
		print "Agent: getMonitorData()"
		
		msg = _Msg(None, _MsgType.Ack)
		msg.results = []
		
		for m in self._monitors:
			try:
				data = m.GetMonitorData()
				if data != None:
					msg.results.append(data)
				
			except:
				print "Agent: getMonitorData: Failrue getting data from:", m.monitorName
				raise
				#pass
		
		return pickle.dumps(msg)
Example #22
    def dump(self):
        """Returns a serialization of the current job instance"""
        obj = {}
        obj['created_at'] = utcformat(self.created_at or utcnow())
        obj['data'] = self.data

        if self.origin is not None:
            obj['origin'] = self.origin
        if self.description is not None:
            obj['description'] = self.description
        if self.enqueued_at is not None:
            obj['enqueued_at'] = utcformat(self.enqueued_at)
        if self.ended_at is not None:
            obj['ended_at'] = utcformat(self.ended_at)
        if self._result is not None:
            obj['result'] = dumps(self._result)
        if self.exc_info is not None:
            obj['exc_info'] = self.exc_info
        if self.timeout is not None:
            obj['timeout'] = self.timeout
        if self.result_ttl is not None:
            obj['result_ttl'] = self.result_ttl
        if self._status is not None:
            obj['status'] = self._status
        if self._dependency_id is not None:
            obj['dependency_id'] = self._dependency_id
        if self.meta:
            obj['meta'] = dumps(self.meta)

        return obj
Example #23
def test_ttest_ind():
    "Test testnd.ttest_ind()"
    ds = datasets.get_uts(True)

    # basic
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds)
    repr(res)
    assert_less(res.p_uncorrected.min(), 0.05)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    repr(res_)
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # cluster
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds, tail=1, samples=1)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # nd
    res = testnd.ttest_ind('utsnd', 'A', 'a1', 'a0', ds=ds, pmin=0.05, samples=2)
    eq_(res._cdist.n_clusters, 10)
Example #24
 def test_pickle(self):
     md = PyMimeData(data=0)
     self.assertEqual(md._local_instance, 0)
     self.assertTrue(md.hasFormat(PyMimeData.MIME_TYPE))
     self.assertFalse(md.hasFormat(PyMimeData.NOPICKLE_MIME_TYPE))
     self.assertEqual(md.data(PyMimeData.MIME_TYPE).data(),
                      dumps(int) + dumps(0))
Example #25
    def test_pickle_dump_load(self):
        # Wipe current cache
        DescriptorMemoryElement.MEMORY_CACHE = {}

        # Make a couple descriptors
        v1 = numpy.array([1, 2, 3])
        d1 = DescriptorMemoryElement('test', 0)
        d1.set_vector(v1)

        v2 = numpy.array([4, 5, 6])
        d2 = DescriptorMemoryElement('test', 1)
        d2.set_vector(v2)

        ntools.assert_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)

        d1_s = cPickle.dumps(d1)
        d2_s = cPickle.dumps(d2)

        # Wipe cache again
        DescriptorMemoryElement.MEMORY_CACHE = {}
        ntools.assert_not_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_not_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)

        # Attempt reconstitution
        d1_r = cPickle.loads(d1_s)
        d2_r = cPickle.loads(d2_s)

        numpy.testing.assert_array_equal(v1, d1_r.vector())
        numpy.testing.assert_array_equal(v2, d2_r.vector())

        # Cache should now have those entries back in it
        ntools.assert_in(('test', 0), DescriptorMemoryElement.MEMORY_CACHE)
        ntools.assert_in(('test', 1), DescriptorMemoryElement.MEMORY_CACHE)
Example #26
def test_anova():
    "Test testnd.anova()"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(True)

    testnd.anova('utsnd', 'A*B', ds=ds)

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=0, pmin=0.05)
    repr(res)
    p = plot.Array(res)
    p.close()

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=2, pmin=0.05)
    repr(res)
    p = plot.Array(res)
    p.close()

    # persistence
    string = pickle.dumps(res, protocol=pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))

    # test multi-effect results (with persistence)
    # UTS
    res = testnd.anova('uts', 'A*B*rm', ds=ds, samples=5)
    repr(res)
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res = pickle.loads(string)
    tfce_clusters = res.tfce_clusters(pmin=0.05)
    peaks = res.tfce_peaks()
    assert_equal(tfce_clusters.eval("p.min()"), peaks.eval("p.min()"))
    unmasked = res.f[0]
    masked = res.masked_parameter_map(effect=0, pmin=0.05)
    assert_array_equal(masked.x <= unmasked.x, True)
Example #27
    def gotResult(self, data, task, ttype):
        '''
            Fetch the data. There are two kinds of tasks:
            1. Shop info: the shop's product list needs to be crawled.
            2. Product info: the product's basic details need to be crawled.
        '''
        # TODO refactor this
        if data:
            if ttype == 'extract':
                total_page, hrefs = json.loads(data)
                total_page = int(total_page)
                hrefs = json.loads(hrefs)
                tids = check_duplicate(self.redis, hrefs)
                #save_tasks(self.redis, tids)
                for h in hrefs:
                    tmp_tid = self.new_task_id()
                    log.info(h)
                    tmp_tbody = {'task': h}
                    tmp_task = BaseTask(tmp_tid, tmp_tbody)
                    self.redis.push_list_data('task_queue', cPickle.dumps(tmp_task))

                task = cPickle.loads(task)
                page = task.tbody.get('page', 1)
                if page == 1 and page < total_page:
                    tmp_tbody = task.tbody
                    for p in xrange(page, total_page):
                        tmp_tid = self.new_task_id()
                        tmp_tbody['page'] = p+1
                        tmp_task = BaseTask(tmp_tid, tmp_tbody)
                        self.redis.push_list_data('extract_queue', cPickle.dumps(tmp_task))

            else:
                save_items(json.loads(data))
        else:
            log.debug('Got an invalid task: %s when taking task: %s' % (task, ttype))
Example #28
def find_nearest_pickleable_exception(exc):
    """With an exception instance, iterate over its super classes (by mro)
    and find the first super exception that is pickleable. It does
    not go below :exc:`Exception` (i.e. it skips :exc:`Exception`,
    :class:`BaseException` and :class:`object`). If that happens
    you should use :exc:`UnpickleableException` instead.

    :param exc: An exception instance.

    :returns: the nearest exception if it's not :exc:`Exception` or below,
        if it is it returns ``None``.

    :rtype: :exc:`Exception`

    """

    unwanted = (Exception, BaseException, object)
    is_unwanted = lambda exc: any(map(curry(operator.is_, exc), unwanted))

    mro_ = getattr(exc.__class__, "mro", lambda: [])
    for supercls in mro_():
        if is_unwanted(supercls):
            # only BaseException and object, from here on down,
            # we don't care about these.
            return None
        try:
            exc_args = getattr(exc, "args", [])
            superexc = supercls(*exc_args)
            pickle.dumps(superexc)
        except:
            pass
        else:
            return superexc
    return None
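
A hypothetical usage sketch (this exception hierarchy is invented for illustration): an exception carrying unpicklable state falls back to its nearest pickleable ancestor below Exception:

import pickle

class TransportError(Exception):
    pass

class SocketError(TransportError):
    def __init__(self, msg, callback):
        TransportError.__init__(self, msg)
        self.callback = callback        # e.g. a lambda, which pickle refuses to serialize

exc = SocketError("connection reset", lambda: None)
safe = find_nearest_pickleable_exception(exc)
# SocketError(*exc.args) fails (wrong arity) and is skipped; TransportError("connection reset")
# pickles cleanly, so `safe` is that instance and pickle.dumps(safe) succeeds.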
Example #29
  def testSerialization(self):
    params = {
      'inputDimensions' : [2,4,5,2],
      'columnDimensions' : [4,3,3],
      'potentialRadius' : 30,
      'potentialPct' : 0.7,
      'globalInhibition' : False,
      'localAreaDensity' : 0.23,
      'numActiveColumnsPerInhArea' : 0,
      'stimulusThreshold' : 2,
      'synPermInactiveDec' : 0.02,
      'synPermActiveInc' : 0.1,
      'synPermConnected' : 0.12,
      'minPctOverlapDutyCycle' : 0.011,
      'minPctActiveDutyCycle' : 0.052,
      'dutyCyclePeriod' : 25,
      'maxBoost' : 11.0,
      'seed' : 19,
      'spVerbosity' : 0
    }
    sp1 = self.createSp("py", params)
    sp2 = pickle.loads(pickle.dumps(sp1))
    self.compare(sp1, sp2)

    sp1 = self.createSp("cpp", params)
    sp2 = pickle.loads(pickle.dumps(sp1))
    self.compare(sp1, sp2)
Example #30
File: job.py Project: hungld/rq
    def save(self):
        """Persists the current job instance to its corresponding Redis key."""
        key = self.key

        obj = {}
        obj['created_at'] = times.format(self.created_at or times.now(), 'UTC')

        if self.func_name is not None:
            obj['data'] = dumps(self.job_tuple)
        if self.origin is not None:
            obj['origin'] = self.origin
        if self.description is not None:
            obj['description'] = self.description
        if self.enqueued_at is not None:
            obj['enqueued_at'] = times.format(self.enqueued_at, 'UTC')
        if self.ended_at is not None:
            obj['ended_at'] = times.format(self.ended_at, 'UTC')
        if self._result is not None:
            obj['result'] = dumps(self._result)
        if self.exc_info is not None:
            obj['exc_info'] = self.exc_info
        if self.timeout is not None:
            obj['timeout'] = self.timeout
        if self.result_ttl is not None:
            obj['result_ttl'] = self.result_ttl
        if self._status is not None:
            obj['status'] = self._status
        if self.meta:
            obj['meta'] = dumps(self.meta)

        self.connection.hmset(key, obj)
Example #31
 def call_plugin_serialized(self, plugin, fn, *args, **kwargs):
     params = {'params': pickle.dumps(dict(args=args, kwargs=kwargs))}
     rv = self.call_plugin(plugin, fn, params)
     return pickle.loads(rv)
Example #32
 def test_pickle_bug(self):
     # Regression test for bug fixed in 24d4fd291054.
     o = Prod()
     s = cPickle.dumps(o, protocol=-1)
     o = cPickle.loads(s)
     cPickle.dumps(o)
Example #33
	
	print(imagePath)
	print(label)
	"""

# grab the unique target names and encode the labels
targetNames = np.unique(target)
le = LabelEncoder()
target = le.fit_transform(target)

# construct the training and testing splits
(trainData, testData, trainTarget,
 testTarget) = train_test_split(data, target, test_size=0.3, random_state=42)

# train the model
model = LinearSVC(random_state=42)
model.fit(data, target)

# evaluate the classifier
print classification_report(testTarget,
                            model.predict(testData),
                            target_names=targetNames)

#clf = MLPClassifier(algorithm='l-bfgs', alpha=1e-5, hidden_layer_sizes=(15,), random_state=1)
#clf.fit(data, label)

# dump the model to file
f = open(args["model"], "w")
f.write(cPickle.dumps(model))
f.close()
Example #34
def train():
    """Train CIFAR-10 for a number of steps."""

    g1 = tf.Graph()
    with g1.as_default():
        #global_step = tf.contrib.framework.get_or_create_global_step()

        global_step = tf.Variable(-1,
                                  name='global_step',
                                  trainable=False,
                                  dtype=tf.int32)
        increment_global_step_op = tf.assign(global_step, global_step + 1)

        # Get images and labels for CIFAR-10.
        images, labels = cifar10.distorted_inputs()

        # Build a Graph that computes the logits predictions from the
        # inference model.
        logits = cifar10.inference(images)

        # Calculate loss.
        loss = cifar10.loss(logits, labels)
        grads = cifar10.train_part1(loss, global_step)

        only_gradients = [g for g, _ in grads]

        class _LoggerHook(tf.train.SessionRunHook):
            """Logs loss and runtime."""
            def begin(self):
                self._step = -1
                self._start_time = time.time()

            def before_run(self, run_context):
                self._step += 1
                return tf.train.SessionRunArgs(loss)  # Asks for loss value.

            def after_run(self, run_context, run_values):
                if self._step % FLAGS.log_frequency == 0:
                    current_time = time.time()
                    duration = current_time - self._start_time
                    self._start_time = current_time

                    loss_value = run_values.results
                    examples_per_sec = FLAGS.log_frequency * FLAGS.batch_size / duration
                    sec_per_batch = float(duration / FLAGS.log_frequency)

                    format_str = (
                        '%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
                        'sec/batch)')
                    print(format_str % (datetime.now(), self._step, loss_value,
                                        examples_per_sec, sec_per_batch))

        with tf.train.MonitoredTrainingSession(
                checkpoint_dir=FLAGS.train_dir,
                hooks=[
                    tf.train.StopAtStepHook(last_step=FLAGS.max_steps),
                    tf.train.NanTensorHook(loss),
                    _LoggerHook()
                ],
                config=tf.ConfigProto(
                    log_device_placement=FLAGS.log_device_placement,
                    gpu_options=gpu_options)) as mon_sess:
            global port
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((TCP_IP, port))
            #receiving the variable values
            recv_size = safe_recv(8, s)
            recv_size = pickle.loads(recv_size)
            recv_data = safe_recv(recv_size, s)
            var_vals = pickle.loads(recv_data)
            s.close()
            feed_dict = {}
            i = 0
            for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
                feed_dict[v] = var_vals[i]
                i = i + 1
            print("Received variable values from ps")
            while not mon_sess.should_stop():
                gradients, step_val = mon_sess.run(
                    [only_gradients, increment_global_step_op],
                    feed_dict=feed_dict)
                #gradients, step_val = mon_sess.run([only_gradients,increment_global_step_op], feed_dict=feed_dict)
                #print("Sending grads")
                # Opening the socket and connecting to server
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((TCP_IP, port))
                # sending the gradients
                send_data = pickle.dumps(gradients, pickle.HIGHEST_PROTOCOL)
                to_send_size = len(send_data)
                send_size = pickle.dumps(to_send_size, pickle.HIGHEST_PROTOCOL)
                #print("size of grads: ", to_send_size)
                s.sendall(send_size)
                #print("Size of size: ", len(send_size))
                s.sendall(send_data)
                #print("sent grads")
                #receiving the variable values
                recv_size = safe_recv(8, s)
                recv_size = pickle.loads(recv_size)
                recv_data = safe_recv(recv_size, s)
                var_vals = pickle.loads(recv_data)
                s.close()
                #print("recved grads")

                feed_dict = {}
                i = 0
                for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
                    feed_dict[v] = var_vals[i]
                    i = i + 1
Example #35
 def _can_pickle(self, host):
     try:
         cPickle.dumps(host)
         return True
     except TypeError:
         return False
Example #36
                CONNECTION_LIST.append(sockfd)
                server_log.log("Client (%s, %s) connected" % addr)

            #Some incoming message from a connected client
            else:
                # Data received from client, process it
                try:
                    #In Windows, sometimes when a TCP program closes abruptly,
                    # a "Connection reset by peer" exception will be thrown

                    answer = ["Robot says:"]
                    if server_log.has_new_line():
                        answer += server_log.get_lastline()
                    if motor_log.has_new_line():
                        answer += motor_log.get_lastline()
                    send_data = pickle.dumps(answer)
                    data = sock.recv(RECV_BUFFER)
                    sock.send(send_data)
                    rcvd_dict = pickle.loads(data)

                    if 'ip_addr' in rcvd_dict:
                        # We have a destination for our video stream. setup and start the thread
                        if VIDEO:
                            video_playing = True
                            video_thread = sendVideo(rcvd_dict['ip_addr'])
                            video_thread.setDaemon(True)
                            video_thread.start()
                            server_log.log("Started video")

                    if 'robot_type' in rcvd_dict:
                        # We have a robot definition! Setup and start the motor thread
Example #37
 def read(self, amt=None):
     return pickle.dumps({})
Example #38
def dispatch_request(nodes, manager, element, configmanager, inputdata,
                     operation):
    a = configmanager.get_collective_member(manager)
    try:
        remote = socket.create_connection((a['address'], 13001))
        remote.settimeout(180)
        remote = ssl.wrap_socket(remote,
                                 cert_reqs=ssl.CERT_NONE,
                                 keyfile='/etc/confluent/privkey.pem',
                                 certfile='/etc/confluent/srvcert.pem')
    except Exception:
        for node in nodes:
            if a:
                yield msg.ConfluentResourceUnavailable(
                    node,
                    'Collective member {0} is unreachable'.format(a['name']))
            else:
                yield msg.ConfluentResourceUnavailable(
                    node,
                    '"{0}" is not recognized as a collective member'.format(
                        manager))

        return
    if not util.cert_matches(a['fingerprint'],
                             remote.getpeercert(binary_form=True)):
        raise Exception("Invalid certificate on peer")
    banner = tlvdata.recv(remote)
    vers = banner.split()[2]
    if vers == b'v0':
        pvers = 2
    elif vers == b'v1':
        pvers = 4
    if sys.version_info[0] < 3:
        pvers = 2
    tlvdata.recv(remote)
    myname = collective.get_myname()
    dreq = pickle.dumps(
        {
            'name': myname,
            'nodes': list(nodes),
            'path': element,
            'tenant': configmanager.tenant,
            'operation': operation,
            'inputdata': inputdata
        },
        protocol=pvers)
    tlvdata.send(remote, {'dispatch': {'name': myname, 'length': len(dreq)}})
    remote.sendall(dreq)
    while True:
        try:
            rlen = remote.recv(8)
        except Exception:
            for node in nodes:
                yield msg.ConfluentResourceUnavailable(
                    node,
                    'Collective member {0} went unreachable'.format(a['name']))
            return
        while len(rlen) < 8:
            try:
                nlen = remote.recv(8 - len(rlen))
            except Exception:
                nlen = 0
            if not nlen:
                for node in nodes:
                    yield msg.ConfluentResourceUnavailable(
                        node, 'Collective member {0} went unreachable'.format(
                            a['name']))
                return
            rlen += nlen
        rlen = struct.unpack('!Q', rlen)[0]
        if rlen == 0:
            break
        try:
            rsp = remote.recv(rlen)
        except Exception:
            for node in nodes:
                yield msg.ConfluentResourceUnavailable(
                    node,
                    'Collective member {0} went unreachable'.format(a['name']))
            return
        while len(rsp) < rlen:
            try:
                nrsp = remote.recv(rlen - len(rsp))
            except Exception:
                nrsp = 0
            if not nrsp:
                for node in nodes:
                    yield msg.ConfluentResourceUnavailable(
                        node, 'Collective member {0} went unreachable'.format(
                            a['name']))
                return
            rsp += nrsp
        try:
            rsp = pickle.loads(rsp, **pargs)
        except UnicodeDecodeError:
            rsp = pickle.loads(rsp, encoding='latin1')
        if isinstance(rsp, Exception):
            raise rsp
        yield rsp
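
The receive loop above frames every pickled response with an 8-byte big-endian length prefix. A minimal sketch of that wire format (helper names are illustrative, not confluent's API):

import pickle
import struct

def send_framed(sock, obj, protocol=2):
    payload = pickle.dumps(obj, protocol=protocol)
    sock.sendall(struct.pack('!Q', len(payload)))   # 8-byte big-endian size prefix
    sock.sendall(payload)

def recv_exactly(sock, n):
    buf = b''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise IOError('peer went unreachable')
        buf += chunk
    return buf

def recv_framed(sock):
    size = struct.unpack('!Q', recv_exactly(sock, 8))[0]
    return pickle.loads(recv_exactly(sock, size)) if size else None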
Example #39
def main():
    if len(sys.argv) < 3:
        print 'Usage: %s <server name/ip> <server port>' % sys.argv[0]
        sys.exit(1)

    # Create a pseudo-file wrapper, condition variable, and socket.  These will
    # be passed to the thread we're about to create.
    wrap = mywrapper()

    # Create a condition variable to synchronize the receiver and player threads.
    # In python, this implicitly creates a mutex lock too.
    # See: https://docs.python.org/2/library/threading.html#condition-objects
    cond_filled = threading.Condition()

    # Create a TCP socket and try connecting to the server.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((sys.argv[1], int(sys.argv[2])))

    # Create a thread whose job is to receive messages from the server.
    recv_thread = threading.Thread(target=recv_thread_func,
                                   args=(wrap, cond_filled, sock))
    recv_thread.daemon = True
    recv_thread.start()

    # Create a thread whose job is to play audio file data.
    dev = ao.AudioDevice('pulse')
    play_thread = threading.Thread(target=play_thread_func,
                                   args=(wrap, cond_filled, dev))
    play_thread.daemon = True
    play_thread.start()

    global current_song
    global number_of_songs

    # Enter our never-ending user I/O loop.  Because we imported the readline
    # module above, raw_input gives us nice shell-like behavior (up-arrow to
    # go backwards, etc.).
    while True:

        line = raw_input('>> ')

        if ' ' in line:
            cmd, args = line.split(' ', 1)
        else:
            cmd = line

        # Send messages to the server when the user types things.
        request_arg = -1
        if cmd in ['l', 'list']:
            print 'The user asked for list.'
            request_type = 0

        if cmd in ['p', 'play']:
            print 'The user asked to play:', args
            request_type = 1
            request_arg = args

            if current_song != int(args) and int(args) < number_of_songs:
                current_song = int(args)
            else:
                request_type = -1

            if int(args) >= number_of_songs or int(args) < 0:
                print(
                    "Please give a valid song number!  Use list if you'd like")

        if cmd in ['s', 'stop']:
            print 'The user asked for stop.'
            request_type = 2
            current_song = -1

        if cmd in ['quit', 'q', 'exit']:
            packet = {}
            packet["type"] = "client_shutdown"
            sock.sendall(pickle.dumps(packet))
            sys.exit(0)

        if request_type > -1:
            # Create packet to send
            packet = {}
            packet["type"] = "client_request"
            packet["msg"] = str(request_type) + str(request_arg)
            packet["len"] = len(packet["msg"])
            packet["last"] = True
            packet["seq"] = 0
            sock.sendall(pickle.dumps(packet))

        # If LIST, then give time to list the list before prompting for new input
        if request_type == 0:
            sleep(0.2)

        request_type = -1
Example #40
 def serialize(self, out):
     """Converts to a bytes representation that can be parsed back using
     :meth:`~.TranslationStore.parsestring`.
     `out` should be an open file-like objects to write to.
     """
     out.write(pickle.dumps(self))
Example #41
 def getData(self):       
     return pickle.dumps(self.bucket)
Example #42
def recv_thread_func(wrap, cond_filled, sock):

    global number_of_songs

    recv_string = ""
    buf_count = 0

    packet = {}
    packet["type"] = "list_length_request"
    sock.sendall(pickle.dumps(packet))
    data = sock.recv(200)
    packet = pickle.loads(data)
    number_of_songs = int(packet["msg"])

    while True:

        # Gather the packet
        try:
            data = sock.recv(20000)
            packet = pickle.loads(data)
        except:
            pass

        if packet["type"] == "server_stop":
            wrap.data = ""
            wrap.mp = None
            recv_string = ""
            buf_count = 0

        if packet["last"] == True:

            if packet["type"] == "server_song":
                wrap.data += recv_string
                if wrap.mf == None:
                    wrap.mf = mad.MadFile(wrap)

                recv_string = ""
                buf_count = 0

        # If list response
        if packet["type"] == "server_list":

            while True:

                if data:
                    recv_string += packet["msg"]
                    if packet["last"] == True:
                        break
                else:
                    break

            for index, song_name in pickle.loads(recv_string).iteritems():
                print(str(index) + ": " + song_name)

        # If list response
        if packet["type"] == "server_song":

            if data:
                recv_string += packet["msg"]
                buf_count += 1

                packet["type"] = "client_ack"
                packet["seq"] = packet["seq"] + 1
                packet["msg"] = ""
                packet["len"] = len(packet["msg"])
                sock.sendall(pickle.dumps(packet))

                if buf_count >= BUF_TO_STREAM:

                    wrap.data += recv_string
                    if wrap.mf == None:
                        wrap.mf = mad.MadFile(wrap)

                    recv_string = ""
                    buf_count = 0

            else:
                break

    pass
Example #43
 def __contains__(self, key):
     _key = pickle.dumps(key, 2)
     return self.data.has_key(_key)
Example #44
def write_pklz(obj, *file_path):
    data = zlib.compress(cPickle.dumps(obj, cPickle.HIGHEST_PROTOCOL))
    with open(path.join(*file_path), 'wb') as fd:
        fd.write(data)
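
The matching reader is not shown in the source; presumably it just reverses the two steps:

def read_pklz(*file_path):
    # Hypothetical counterpart to write_pklz: read, decompress, then unpickle.
    with open(path.join(*file_path), 'rb') as fd:
        return cPickle.loads(zlib.decompress(fd.read()))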
Example #45
 def __setitem__(self, key, item):
     _key = pickle.dumps(key, 2)
     self.data[_key] = pickle.dumps(item, 2)
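
The class's __getitem__ is not included in this set of examples, but given get() in Example #52 it would presumably be:

 def __getitem__(self, key):
     # Hypothetical counterpart to __setitem__: pickle the key, unpickle the stored value.
     _key = pickle.dumps(key, 2)
     return pickle.loads(self.data[_key])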
Example #46
                rzfscmd = '"zfs list -Ho name -t snapshot -d 1 \'%s\' | tail -n 1 | cut -d@ -f2"' % (remotefs_final)
                sshproc = pipeopen('%s -p %d %s %s' % (sshcmd, remote_port, remote, rzfscmd))
                output = sshproc.communicate()[0]
                if output != '':
                    expected_local_snapshot = '%s@%s' % (localfs, output.split('\n')[0])
                    if expected_local_snapshot == snapname:
                        log.warn("Snapshot %s already exist on remote, marking as such" % (snapname))
                        system('%s -p %d %s "/sbin/zfs inherit -r freenas:state \'%s\'"' % (sshcmd, remote_port, remote, remotefs_final))
                        # Replication was successful, mark as such
                        MNTLOCK.lock()
                        system('/sbin/zfs inherit freenas:state "%s"' % (snapname))
                        system('/sbin/zfs release -r freenas:repl "%s"' % (snapname))
                        MNTLOCK.unlock()
                        continue

        # Something wrong, report.
        log.warn("Replication of %s failed with %s" % (snapname, msg))
        error, errmsg = send_mail(subject="Replication failed!", text=\
            """
Hello,
    The system was unable to replicate snapshot %s to %s
======================
%s
            """ % (localfs, remote, msg), interval=datetime.timedelta(hours=2), channel='autorepl')
        break

with open(REPL_RESULTFILE, 'w') as f:
    f.write(cPickle.dumps(results))
os.remove('/var/run/autorepl.pid')
log.debug("Autosnap replication finished")
Example #47
 def setdefault(self, key, failobj=None):
     _key = pickle.dumps(key, 2)
     if _key not in self.data:
         self.data[_key] = pickle.dumps(failobj, 2)
     return pickle.loads(self.data[_key])
Example #48
 def __delitem__(self, key):
     _key = pickle.dumps(key, 2)
     if not self.data.has_key(_key):
         raise KeyError(key)
     del self.data[_key]
Example #49
 def has_key(self, key):
     _key = pickle.dumps(key, 2)
     return self.data.has_key(_key)
Example #50
 def fromkeys(cls, iterable, value=None):
     d = cls()
     for key in iterable:
         _key = pickle.dumps(key, 2)
         d[_key] = pickle.dumps(value, 2)
     return d
Example #51
    def toHdf5(self,fileName,group='component1/part1'):
        """
        Dump field to HDF5, in a simple format suitable for interoperability (TODO: document).

        :param str fileName: HDF5 file
        :param str group: HDF5 group the data will be saved under.

        The HDF hierarchy is like this::

            group
              |
              +--- mesh_01 {hash=25aa0aa04457}
              |      +--- [vertex_coords]
              |      +--- [cell_types]
              |      \--- [cell_vertices]
              +--- mesh_02 {hash=17809e2b86ea}
              |      +--- [vertex_coords]
              |      +--- [cell_types]
              |      \--- [cell_vertices]
              +--- ...
              +--- field_01
              |      +--- -> mesh_01
              |      \--- [vertex_values]
              +--- field_02
              |      +--- -> mesh_01
              |      \--- [vertex_values]
              +--- field_03
              |      +--- -> mesh_02
              |      \--- [cell_values]
              \--- ...

        where ``plain`` names are HDF (sub)groups, ``[bracketed]`` names are datasets, ``{name=value}`` are HDF attributes, ``->`` prefix indicated HDF5 hardlink (transparent to the user); numerical suffixes (``_01``, ...) are auto-allocated. Mesh objects are hardlinked using HDF5 hardlinks if an identical mesh is already stored in the group, based on hexdigest of its full data.

        .. note:: This method has not been tested yet. The format is subject to future changes.
        """
        import h5py, hashlib
        hdf=h5py.File(fileName,'a',libver='latest')
        if group not in hdf: gg=hdf.create_group(group)
        else: gg=hdf[group]
        # raise IOError('Path "%s" is already used in "%s".'%(path,fileName))
        def lowestUnused(trsf,predicate,start=1):
            'Find the lowest unused index, where *predicate* is used to test for existence, and *trsf* transforms integer (starting at *start* and incremented until unused value is found) to whatever predicate accepts as argument. Lowest transformed value is returned.'
            import itertools,sys
            for i in itertools.count(start=start):
                t=trsf(i)
                if not predicate(t): return t
        # save mesh (not saved if there already)
        newgrp=lowestUnused(trsf=lambda i:'mesh_%02d'%i,predicate=lambda t:t in gg)
        mh5=self.getMesh().asHdf5Object(parentgroup=gg,newgroup=newgrp)

        if self.value:
            fieldGrp=hdf.create_group(lowestUnused(trsf=lambda i,group=group: group+'/field_%02d'%i,predicate=lambda t: t in hdf))
            fieldGrp['mesh']=mh5
            fieldGrp.attrs['fieldID']=self.fieldID
            fieldGrp.attrs['valueType']=self.valueType
            # string/bytes may not contain NULL when stored as string in HDF5
            # see http://docs.h5py.org/en/2.3/strings.html
            # that's why we cast to opaque type "void" and uncast using tostring before unpickling
            fieldGrp.attrs['units']=numpy.void(pickle.dumps(self.unit))
            fieldGrp.attrs['time']=numpy.void(pickle.dumps(self.time))
            #fieldGrp.attrs['time']=self.time.getValue()
            if self.fieldType==FieldType.FT_vertexBased:
                val=numpy.empty(shape=(self.getMesh().getNumberOfVertices(),self.getRecordSize()),dtype=numpy.float)
                for vert in range(self.getMesh().getNumberOfVertices()): val[vert]=self.getVertexValue(vert).getValue()
                fieldGrp['vertex_values']=val
            elif self.fieldType==FieldType.FT_cellBased:
                # raise NotImplementedError("Saving cell-based fields to HDF5 is not yet implemented.")
                val=numpy.empty(shape=(self.getMesh().getNumberOfCells(),self.getRecordSize()),dtype=numpy.float)
                for cell in range(self.getMesh().getNumberOfCells()):
                    val[cell]=self.getCellValue(cell)
                fieldGrp['cell_values']=val
            else: raise RuntimeError("Unknown fieldType %d."%(self.fieldType))
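
Reading the pickled attributes back follows the comment above about casting through numpy.void; a short sketch under the same fileName/group conventions (the field suffix is the auto-allocated one):

import h5py
import pickle

def read_field_attrs(fileName, group='component1/part1', field='field_01'):
    # Sketch: uncast the numpy.void attributes written above back into Python objects.
    with h5py.File(fileName, 'r') as hdf:
        fieldGrp = hdf[group + '/' + field]
        unit = pickle.loads(fieldGrp.attrs['units'].tostring())   # void -> bytes -> object
        time = pickle.loads(fieldGrp.attrs['time'].tostring())
        return unit, time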
Example #52
 def get(self, key, failobj=None):
     _key = pickle.dumps(key, 2)
     if not self.data.has_key(_key):
         return failobj
     return pickle.loads(self.data[_key])
Example #53
 def _SendCall(self, msg):
     logger.GetLog().debug('ClientRPCHandler _SendCall : %s' % msg)
     msg_str = cPickle.dumps(msg)
     ModObjFac.CreateApp().send_msg_to_server_node(self.client_type, msg_str, *self.args)
Example #54
    def get_string(self, **kwargs):
        """Return string representation of table in current state.

        Arguments:

        start - index of first data row to include in output
        end - index of last data row to include in output PLUS ONE (list slice style)
        fields - names of fields (columns) to include
        header - print a header showing field names (True or False)
        border - print a border around the table (True or False)
        hrules - controls printing of horizontal rules after rows.  Allowed values: FRAME, ALL, NONE
        padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
        left_padding_width - number of spaces on left hand side of column data
        right_padding_width - number of spaces on right hand side of column data
        vertical_char - single character string used to draw vertical lines
        horizontal_char - single character string used to draw horizontal lines
        junction_char - single character string used to draw line junctions
        sortby - name of field to sort rows by
        reversesort - True or False to sort in descending or ascending order"""

        options = self._get_options(kwargs)

        if self._caching:
            key = cPickle.dumps(options)
            if key in self._cache:
                return self._cache[key]

        bits = []
        if not self._field_names:
            return ""
        if not options["header"]:
            # Recalculate widths - avoids tables with long field names but narrow data looking odd
            old_widths = self._widths[:]
            self._widths = [0] * _get_size(self._field_names)[0]
            for row in self._rows:
                for i in range(0, len(row)):
                    if _get_size(_unicode(row[i]))[0] > self._widths[i]:
                        self._widths[i] = _get_size(_unicode(row[i]))[0]
        if options["header"]:
            bits.append(self._stringify_header(options))
        elif options["border"] and options["hrules"] != NONE:
            bits.append(self._stringify_hrule(options))
        if options["sortby"]:
            rows = self._get_sorted_rows(options)
        else:
            rows = self._rows[options["start"]:options["end"]]
        for row in rows:
            bits.append(self._stringify_row(row, options))
        if options["border"] and not options["hrules"]:
            bits.append(self._stringify_hrule(options))
        string = "\n".join(bits)

        if self._caching:
            self._cache[key] = string

        if not options["header"]:
            # Restore previous widths
            self._widths = old_widths
            for row in self._rows:
                for i in range(0, len(row)):
                    if _get_size(_unicode(row[i]))[0] > self._widths[i]:
                        self._widths[i] = _get_size(_unicode(row[i]))[0]

        self._nonunicode = string
        return _unicode(string)
Example #55
 def packageSuccess(self, obj):
     # print 'returning: ',obj
     serial = pickle.dumps(obj, 2)
     return xmlrpc.Binary(serial)
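
On the calling side of this XML-RPC method, the Binary wrapper would presumably be unwrapped and unpickled, roughly:

# Hypothetical client-side counterpart: xmlrpclib.Binary exposes the raw bytes as .data,
# so any call whose result goes through packageSuccess can be unwrapped like this.
binary = proxy.some_remote_call()
obj = pickle.loads(binary.data)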
Example #56
def pickle_object(o):
    """Pickles the specified model and its weights."""
    return pickle.dumps(o, -1)
Example #57
def enpickle(data):
    "Encodes a value as a string for storage in a table."
    return dumps(data, -1)
Example #58
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result


if __name__ == '__main__':
    # verify that instances can be pickled
    from cPickle import loads, dumps
    Point = namedtuple('Point', 'x, y', True)
    p = Point(x=10, y=20)
    assert p == loads(dumps(p))

    # test and demonstrate ability to override methods
    class Point(namedtuple('Point', 'x y')):
        __slots__ = ()
        @property
        def hypot(self):
            return (self.x ** 2 + self.y ** 2) ** 0.5
        def __str__(self):
            return 'Point: x=%6.3f  y=%6.3f  hypot=%6.3f' % (self.x, self.y, self.hypot)

    for p in Point(3, 4), Point(14, 5/7.):
        print p

    class Point(namedtuple('Point', 'x y')):
        'Point class with optimized _make() and _replace() without error-checking'
Example #59
                  iouvext=30,
                  iouvint=31,
                  iba=27,
                  nbpbf=20,
                  pourcentbf=15)
 print pp
 dp = pp.toDump()
 pp.load(dp)
 ppp = ProfsParam1(**dp)
 print ppp == pp
 #     pp._nptprof = 60
 #     print pp
 #     pp.iouverture = (30, 35)
 #     print pp
 import cPickle
 ppkl = cPickle.dumps(pp, protocol=0)
 #     print ppkl
 ppp = cPickle.loads(ppkl)
 print 'pp==ppp ?', pp == ppp
 print 20 * "="
 #     p = ProfsParam1(nptprof=86, iouvext=46, iouvint=53, iba=40)
 #     print 'p : ', p
 #     print 'p.copy : ',p.copy
 #     print 'p==p.copy ? :', p==p.copy
 #     p.iouverture = 46, 80
 #     print p
 #     app = QApplication(sys.argv)
 #     gpp = GuiProfParam(parent=None, profparam=pp)
 #     print gpp.exec_()
 #     print gpp.old
 #     print '     ===>'
Example #60
def week_time(request, term=None, get_average=False, get_total=False):
    """班班通授课综合分析>周授课时长分析"""
    school_year = request.GET.get('school_year', None)
    term_type = request.GET.get('term_type', None)
    if not term:
        terms = models.Term.objects.all()
        if school_year and term_type:
            terms = terms.filter(school_year=school_year, term_type=term_type)
            term = terms[0]

    if not (school_year or term_type) and not term:
        # term = models.NewTerm.get_current_term()
        term = models.NewTerm.get_nearest_term()

    cached_value = None
    key = None
    if term:
        _cache_key_prefix = 'teaching-analysis-week-time'
        town_name = request.GET.get('town_name')
        school_name = request.GET.get('school_name')
        grade_name = request.GET.get('grade_name')
        class_name = request.GET.get('class_name')
        key = '%s:%s-%s-%s-%s-%s-%s' % (_cache_key_prefix, term.school_year,
                                        term.term_type, town_name, school_name,
                                        grade_name, class_name)
        if not settings.DEBUG:
            cached_value = cache.get(key)

    if cached_value:
        records = cPickle.loads(cached_value)
    else:
        if term:
            terms = models.Term.objects.filter(school_year=term.school_year,
                                               term_type=term.term_type)
        else:
            terms = models.Term.get_current_term_list()
        objs = _query2(request, terms)
        records = model_list_to_dict(objs)
        if key:
            cache.set(key, cPickle.dumps(records, cPickle.HIGHEST_PROTOCOL),
                      _t(key, 60 * 60 * 4))

    # The two cases below are used when other functions call this view
    # and want the raw, unformatted data back
    if get_average:
        # For the average we also need the total number of classes
        # Returned data format: {1: 1.2222, 2: 2.344}
        if not term and terms:
            term = terms[0]
        cond = models.Statistic.get_filter_condition(request.REQUEST, 'class',
                                                     term)
        # cond = models.Statistic.get_filter_condition({'school_year': term.school_year, 'term_type': term.term_type}, 'class', term)
        class_count = models.Statistic.objects.filter(**cond).count()
        class_count = class_count > 0 and class_count or 1
        for i in records:
            i['week_time'] = i['week_time'] * 1.0 / class_count / 60
        records = {i['week']: i['week_time'] for i in records}

    elif get_total:
        # For the cumulative total
        # Returned data format: {1: 2, 2: 4, 3: 6}
        records = {i['week']: i['week_time'] * 1.0 / 60 for i in records}
        records = records and records or {1: 0}
        for i in range(1, max(records.keys()) + 1):
            records[i] = records[i] + records.get(i - 1, 0)

    else:
        records = [{
            'week': i['week'],
            'week_time': float('%.0f' % (i['week_time'] * 1.0 / 60))
        } for i in records]

    return create_success_dict(data=records)