def test_copying(self):
     # Check that counters are copyable, deepcopyable, picklable, and
     # have a repr/eval round-trip
     words = Counter('which witch had which witches wrist watch'.split())
     update_test = Counter()
     update_test.update(words)
     for i, dup in enumerate([
                 words.copy(),
                 copy.copy(words),
                 copy.deepcopy(words),
                 pickle.loads(pickle.dumps(words, 0)),
                 pickle.loads(pickle.dumps(words, 1)),
                 pickle.loads(pickle.dumps(words, 2)),
                 pickle.loads(pickle.dumps(words, -1)),
                 cPickle.loads(cPickle.dumps(words, 0)),
                 cPickle.loads(cPickle.dumps(words, 1)),
                 cPickle.loads(cPickle.dumps(words, 2)),
                 cPickle.loads(cPickle.dumps(words, -1)),
                 eval(repr(words)),
                 update_test,
                 Counter(words),
                 ]):
         msg = (i, dup, words)
         self.assertTrue(dup is not words)
         self.assertEqual(dup, words)
         self.assertEqual(len(dup), len(words))
         self.assertEqual(type(dup), type(words))
Example #2
# (assumed imports for this snippet: from pickle import PicklingError, dumps)
def unpicklable(target_dict, key):
    try:
        dumps(target_dict[key])
    except PicklingError:
        return ("{} can't be pickled.  If it is a function, try defining "+\
                "it in the module space rather than in a class or closure.").\
                format(key)
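A minimal usage sketch for the helper above (not from the original source; the dict and its keys are hypothetical). A module-level lambda cannot be pickled, so only that key is reported:

settings = {"rate": 1.5, "callback": lambda x: x * 2}  # hypothetical data
for key in settings:
    message = unpicklable(settings, key)
    if message:
        print(message)  # only "callback" should be reported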
Example #3
def test_pickle():
    import pickle

    # classification
    obj = tree.DecisionTreeClassifier()
    obj.fit(iris.data, iris.target)
    score = obj.score(iris.data, iris.target)
    s = pickle.dumps(obj)

    obj2 = pickle.loads(s)
    assert_equal(type(obj2), obj.__class__)
    score2 = obj2.score(iris.data, iris.target)
    assert score == score2, "Failed to generate same score " + \
        " after pickling (classification) "

    # regression
    obj = tree.DecisionTreeRegressor()
    obj.fit(boston.data, boston.target)
    score = obj.score(boston.data, boston.target)
    s = pickle.dumps(obj)

    obj2 = pickle.loads(s)
    assert_equal(type(obj2), obj.__class__)
    score2 = obj2.score(boston.data, boston.target)
    assert score == score2, "Failed to generate same score " + \
        " after pickling (regression) "
Example #4
File: data.py Project: tolkjen/Degree
    def add_scores(self, space, scores, latest, unfinished):
        """
        Store partial results of the calculation.
        :param space: SearchSpace object explored during calculations.
        :param scores: List of tuples (precision, sensitivity, f1).
        :param latest: Highest index of a description pair for which the 
        results have been calculated.
        :param unfinished: List of tuples describing ranges of indices of 
        descriptor pairs for which the results haven't been calculated yet, 
        indices < latest.
        """
        session = self._session_factory()
        space_obj = session.query(SearchOperation).filter_by(space_descr=str(space)).first()
        if not space_obj:
            space_obj = SearchOperation(space_descr=str(space), space=pickle.dumps(space),
                                        enabled=True, done=False)
            session.add(space_obj)
            session.commit()
        space_obj.latest = latest
        space_obj.unfinished = pickle.dumps(unfinished)

        scores_obj = SearchSpaceScore(space_id=space_obj.id)
        scores_obj.scores = pickle.dumps(scores)
        session.add(scores_obj)
        session.commit()
        session.close()
Example #5
    def test_executor(self):
        executor = MyExecutor()
        driver = MockExecutorDriver(executor)
      
        executorInfo = mesos_pb2.ExecutorInfo()
        executorInfo.executor_id.value = "test-id"
        executorInfo.data = marshal.dumps(("./", os.getcwd(), sys.path, 8, "", "", 1, 
            {'DPARK_HAS_DFS':'False', 'WORKDIR':'/tmp/xxxxx'}))

        frameworkInfo = mesos_pb2.FrameworkInfo()
        frameworkInfo.id.value = "test"

        slaveInfo = mesos_pb2.SlaveInfo()
        slaveInfo.id.value = "test-slave"
        slaveInfo.hostname = socket.gethostname()

        executor.registered(driver, executorInfo, frameworkInfo, slaveInfo)
        assert executor.init_args

        task = mesos_pb2.TaskInfo()
        task.name = 'test-task'
        task.task_id.value = '1'
        task.slave_id.value = 'test-slave'
        task.data = pickle.dumps((TestTask(1), 1), -1)
        executor.launchTask(driver, task)

        task.task_id.value = '2'
        task.data = pickle.dumps((TestTask(2), 1), -1)
        executor.launchTask(driver, task)
        
        executor.frameworkMessage(driver, 'data')
        executor.killTask(driver, task.task_id)
Example #6
 def test_is_pickable(self):
     m = DefaultsModel()
     m.save()
     try:
         pickle.dumps(m)
     except TypeError as e:
         self.fail('pickle of DefaultsModel failed: %s' % e)
Example #7
    def test_cached_list(self):

        # Set up the test data.
        users = User.objects.all()[:10]
        user_cache = TestUserCachedList(users)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # Force it through the pickle cycle.
        user_cache = pickle.loads(pickle.dumps(user_cache))
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # The pickle size is greatly reduced. While making this test, it went
        # from 6377 bytes to 201 bytes. To avoid a brittle test, just check
        # that it's less than half the size.
        normal_pickle_size = len(pickle.dumps(users))
        improved_pickle_size = len(pickle.dumps(user_cache))
        self.assertTrue(improved_pickle_size < normal_pickle_size / 2.0)

        # Force it through the cache cycle.
        cache_key = 'apncore.cache.tests.test_cached_list'
        user_cache.cache(cache_key)
        user_cache = cache.get(cache_key)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])

        # Delete the cached items, forcing the class to rebuild them.
        # The main list must be retrieved again to test unpacking its items.
        item_cache_keys = list(user_cache.make_cache_keys([user.pk for user in users]))
        cache.delete_many(item_cache_keys)
        user_cache = cache.get(cache_key)
        self.assertEqual([user.pk for user in users], [user.pk for user in user_cache])
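A self-contained sketch (not from the project) of the size effect described in the comment above: pickling only primary keys is far smaller than pickling full objects.

import pickle

full_objects = [{"pk": i, "name": "user%d" % i, "bio": "x" * 500} for i in range(10)]
pk_only = [obj["pk"] for obj in full_objects]

print(len(pickle.dumps(full_objects)))  # thousands of bytes: every field is serialized
print(len(pickle.dumps(pk_only)))       # a few dozen bytes: just ten integers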
Example #8
    def CreateTable(self, table, columns):
        """CreateTable(table, columns) - Create a new table in the database.
        
        raises TableDBError if it already exists or for other DB errors.
        """
        assert isinstance(columns, list)
        txn = None
        try:
            if contains_metastrings(table):
                raise ValueError('bad table name: contains reserved metastrings')
            for column in columns:
                if contains_metastrings(column):
                    raise ValueError('bad column name: contains reserved metastrings')

            columnlist_key = _columns_key(table)
            if getattr(self.db, 'has_key')(columnlist_key):
                raise TableAlreadyExists, 'table already exists'
            txn = self.env.txn_begin()
            getattr(self.db, 'put_bytes', self.db.put)(columnlist_key, pickle.dumps(columns, 1), txn=txn)
            tablelist = pickle.loads(getattr(self.db, 'get_bytes', self.db.get)(_table_names_key, txn=txn, flags=db.DB_RMW))
            tablelist.append(table)
            self.db.delete(_table_names_key, txn=txn)
            getattr(self.db, 'put_bytes', self.db.put)(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
            txn.commit()
            txn = None
        except db.DBError as dberror:
            if txn:
                txn.abort()
            if sys.version_info < (2, 6):
                raise TableDBError, dberror[1]
            else:
                raise TableDBError, dberror.args[1]

        return
Example #9
    def test_copy_pickle(self):

        d = Deque('abc')

        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            s = pickle.dumps(d, proto)
            e = pickle.loads(s)
            self.assertNotEqual(id(d), id(e))
            self.assertEqual(type(d), type(e))
            self.assertEqual(list(d), list(e))

        d = Deque('abcde', maxlen=4)

        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            s = pickle.dumps(d, proto)
            e = pickle.loads(s)
            self.assertNotEqual(id(d), id(e))
            self.assertEqual(type(d), type(e))
            self.assertEqual(list(d), list(e))
Example #10
    def test_make_proxy_disc(self):
        abc = DiscreteVariable("abc", values="abc", ordered=True)
        abc1 = abc.make_proxy()
        abc2 = abc1.make_proxy()
        self.assertIs(abc.master, abc)
        self.assertIs(abc1.master, abc)
        self.assertIs(abc2.master, abc)
        self.assertEqual(abc, abc1)
        self.assertEqual(abc, abc2)
        self.assertEqual(abc1, abc2)
        self.assertEqual(hash(abc), hash(abc1))
        self.assertEqual(hash(abc1), hash(abc2))

        abcx = DiscreteVariable("abc", values="abc", ordered=True)
        self.assertNotEqual(abc, abcx)

        abc1p = pickle.loads(pickle.dumps(abc1))
        self.assertIs(abc1p.master, abc)
        self.assertEqual(abc1p, abc)

        abcp, abc1p, abc2p = pickle.loads(pickle.dumps((abc, abc1, abc2)))
        self.assertIs(abcp.master, abcp.master)
        self.assertIs(abc1p.master, abcp.master)
        self.assertIs(abc2p.master, abcp.master)
        self.assertEqual(abcp, abc1p)
        self.assertEqual(abcp, abc2p)
        self.assertEqual(abc1p, abc2p)
Example #11
    def request(self, *args, **kwargs):
        """Maintains the existing api for Session.request.

        Used by all of the higher level methods, e.g. Session.get.

        The background_callback param allows you to do some processing on the
        response in the background, e.g. call resp.json() so that json parsing
        happens in the background thread.
        """
        if self.session:
            func = self.session.request
        else:
            # avoid calling super to not break pickled method
            func = partial(Session.request, self)

        background_callback = kwargs.pop('background_callback', None)
        if background_callback:
            func = partial(wrap, self, func, background_callback)

        if isinstance(self.executor, ProcessPoolExecutor):
            # verify function can be pickled
            try:
                dumps(func)
            except (TypeError, PickleError):
                raise RuntimeError(PICKLE_ERROR)

        return self.executor.submit(func, *args, **kwargs)
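A self-contained sketch of the same guard in isolation (assumed, not the library's code): verify that a callable pickles before submitting it to a process pool.

import pickle
from concurrent.futures import ProcessPoolExecutor
from functools import partial

def add(a, b):
    return a + b

task = partial(add, 2)  # a partial of a module-level function pickles fine
try:
    pickle.dumps(task)
except (TypeError, pickle.PickleError):
    raise RuntimeError("task is not picklable; it cannot run in a process pool")

if __name__ == "__main__":
    with ProcessPoolExecutor() as executor:
        print(executor.submit(task, 3).result())  # 5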
Example #12
File: SinaL2.py Project: arong-me/dHydra
    def on_recv_data(self, message):
        parsed_msg = util.ws_parse(
            message,
            trading_date = self.trading_date,
            to_dict=True
        )

        # Update the market quotation data in redis
        for data in parsed_msg:
            try:
                if data["data_type"] == "orders":
                    self.publish(
                        data = pickle.dumps(data),
                        channel_name = "dHydra.SinaL2." +
                                       data["symbol"] + ".orders"
                    )
                elif data["data_type"] == "quotation":
                    self.publish(
                        data = pickle.dumps(data),
                        channel_name = "dHydra.SinaL2." +
                                       data["symbol"] + ".quotation"
                    )
                elif data["data_type"] == "transaction":
                    self.publish(
                        data = pickle.dumps(data),
                        channel_name = "dHydra.SinaL2." +
                                       data["symbol"] + ".transaction"
                    )
            except Exception as e:
                self.logger.error(e)

        self.publish(pickle.dumps(parsed_msg))
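A hedged subscriber-side sketch (not part of the dHydra project) showing how one of the channels published above could be read back with redis-py; the symbol is hypothetical.

import pickle
import redis

r = redis.StrictRedis()
pubsub = r.pubsub()
pubsub.subscribe("dHydra.SinaL2.sz000001.transaction")  # hypothetical symbol

for message in pubsub.listen():
    if message["type"] == "message":
        data = pickle.loads(message["data"])  # only unpickle data from a trusted publisher
        print(data["symbol"], data["data_type"])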
Example #13
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname), _byte_string('???'))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)

        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42

        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)

        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
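A minimal, self-contained illustration (not from pytz) of the byte-patching trick used above: with protocol 0 the payload is text-like, so a substring can be swapped before unpickling.

import pickle

payload = pickle.dumps("melbourne", protocol=0)
hacked = payload.replace(b"melbourne", b"unknown??")  # same length, arbitrary text
print(pickle.loads(hacked))  # 'unknown??'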
Example #14
    def test01(self):
        tabname = "test01"
        colname = 'cool numbers'
        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, [colname])
        import sys
        if sys.version_info[0] < 3 :
            self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159, 1)})
        else :
            self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159,
                1).decode("iso8859-1")})  # 8 bits

        if verbose:
            self.tdb._db_print()

        values = self.tdb.Select(
            tabname, [colname], conditions={colname: None})

        import sys
        if sys.version_info[0] < 3 :
            colval = pickle.loads(values[0][colname])
        else :
            colval = pickle.loads(bytes(values[0][colname], "iso8859-1"))
        self.assert_(colval > 3.141)
        self.assert_(colval < 3.142)
Example #15
def http_method(method_name):
    if request.method == 'GET':
        return render_template('xmlrpc-methods.html', methods = current_app.wl_server_methods)

    if method_name not in current_app.wl_server_methods:
        return "Method name not supported", 404

    server_instance = current_app.wl_server_instance

    raw_data = request.get_data()
    args = pickle.loads(raw_data)
    

    try:
        if method_name == 'test_me':
            result = args[0]
        else:
            method = getattr(server_instance, 'do_%s' % method_name)
            result = method(*args)
    except:
        exc_type, exc_instance, _ = sys.exc_info()
        remote_exc_type = _get_type_name(exc_type)
        log.error(__name__, 'Error on %s' % method_name)
        log.error_exc(__name__)
        return pickle.dumps({
            'is_error' : True,
            'error_type' : remote_exc_type,
            'error_args' : exc_instance.args,
        })
    else:
        return pickle.dumps({ 'result' : result })
Example #16
File: models.py Project: Fingel/potion
 def setAttrs(self, attrs, replace=False):
     if replace:
         self.attributes = dumps(attrs)
         return
     a = loads(self.attributes)
     a.update(attrs)
     self.attributes = dumps(a)
Example #17
    def _test_marshalling(self):
        seq_code = "434TTT"
        shot_code = "8324"

        class_name = "TestCommand"

        marshaller = Marshaller()
        marshaller.set_class( class_name )
        marshaller.set_option("asset_code", "chr102")
        marshaller.add_arg(seq_code)
        marshaller.add_arg(shot_code)

        pickle1 = pickle.dumps(marshaller)


        marshalled = marshaller.get_marshalled()

        # create a new marshaller
        marshaller2 = Marshaller()
        marshalled2 = marshaller2.get_from_marshalled(marshalled)

        pickle2 = pickle.dumps(marshaller)


        self.assertEqual(pickle1, pickle2)
Example #18
 def test_unpicklable(self):
     self.make_reactors()
     import pickle
     with self.assertRaises(NotImplementedError):
         pickle.dumps(self.r1)
     with self.assertRaises(NotImplementedError):
         pickle.dumps(self.net)
Example #19
 def _get_pickling_errors(cls, obj, seen=None):
     """ Return any errors that would be raised if `obj' is pickled
     Courtesy of koffie @ http://stackoverflow.com/a/7218986/109897
     """
     if seen is None:
         seen = []
     try:
         state = obj.__getstate__()
     except AttributeError:
         return
     if state is None:
         return
     if isinstance(state, tuple):
         if not isinstance(state[0], dict):
             state = state[1]
         else:
             # dict.update() returns None, so merge in place and keep the dict
             state[0].update(state[1])
             state = state[0]
     result = {}
     for i in state:
         try:
             pickle.dumps(state[i], protocol=2)
         except pickle.PicklingError:
             if not state[i] in seen:
                 seen.append(state[i])
                 result[i] = cls._get_pickling_errors(state[i], seen)
     return result
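A self-contained, simplified sketch of the same idea (assumed, not the original helper): report which attributes of an object cannot be pickled.

import pickle

def pickling_errors(obj):
    bad = {}
    for name, value in vars(obj).items():
        try:
            pickle.dumps(value, protocol=2)
        except Exception as exc:  # PicklingError, AttributeError, TypeError, ...
            bad[name] = repr(exc)
    return bad

class Config(object):
    def __init__(self):
        self.name = "demo"
        self.handler = lambda x: x  # lambdas cannot be pickled

print(pickling_errors(Config()))  # only 'handler' is reported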
Example #20
def initiate(main_port, tracker_addr, uname, pwd):
	sock = create_sock(main_port)
	sock.connect((tracker_addr["ip"], tracker_addr["port"]))
	sock.send(pickle.dumps({"cat" : "get", "item" : "peerlist"}))
	r = sock.recv(1024)
	peerlist = pickle.loads(r)
	sock.close()
	print(peerlist)
	modpath = input("Enter path of module : ")
	if os.path.isfile(modpath) == True:
		command = "python3 " + modpath + " initiate"
		threading.Thread(target = caller, args = (command, ), daemon = True).start()
	else:
		initiate(main_port, tracker_addr, uname, pwd)
	time.sleep(3)
	sock = create_sock(main_port)
	sock.connect(("127.0.0.1", 54321))
	sock.send(pickle.dumps("data"))
	data = pickle.loads(transfer.receiver(sock))
	sock.close()
	print("Data received : ")
	print(data)

	results = initiate2(main_port, tracker_addr, uname, pwd, modpath, data)
	sock = create_sock()
	sock.connect(("127.0.0.1", 54321))
	transfer.sender(sock, pickle.dumps(results))
	sock.close()
Example #21
def sendTask(task):
	connection = pika.BlockingConnection(pika.ConnectionParameters(
	        host=REMOTE_IP))
	channel = connection.channel()

	channel.queue_declare(queue=TASK_DESTINATION, durable=True)
	wow = {'Name': 'Zara', 'Age': 7, 'Class': 'First'};
	sendable = pickle.dumps(wow)

	myJob1 = Job(0, 5, [1,2,3,4,5])
	myJob2 = Job(0, 5, [1,2,3,4,6])

	message = pickle.dumps(myJob1)

	channel.basic_publish(exchange='',
	                      routing_key=TASK_DESTINATION,
	                      body=message,
	                      properties=pika.BasicProperties(
	                         delivery_mode = 2, # make message persistent
	                      ))

	message = pickle.dumps(myJob2)

	channel.basic_publish(exchange='',
	                      routing_key=TASK_DESTINATION,
	                      body=message,
	                      properties=pika.BasicProperties(
	                         delivery_mode = 2, # make message persistent
	                      ))

	print(" [TASK] Sent %r" % message)
	connection.close()
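A hedged consumer-side sketch (assumed, pika >= 1.0 API; REMOTE_IP and TASK_DESTINATION as above): receive the pickled Job objects and un-pickle them.

import pickle
import pika

def on_message(channel, method, properties, body):
    job = pickle.loads(body)  # only unpickle messages from trusted producers
    print(" [WORKER] Got job:", job)
    channel.basic_ack(delivery_tag=method.delivery_tag)

connection = pika.BlockingConnection(pika.ConnectionParameters(host=REMOTE_IP))
channel = connection.channel()
channel.queue_declare(queue=TASK_DESTINATION, durable=True)
channel.basic_consume(queue=TASK_DESTINATION, on_message_callback=on_message)
channel.start_consuming()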
Example #22
def reader():
    rd = socket(AF_INET, SOCK_DGRAM)
    rd.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    rd.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
    rd.bind(('', PORT))
    while True:
        msg, (addr, _) = rd.recvfrom(MAX)
        data = pickle.loads(msg)
        if data[:6] == '^rqst&':
            sd.sendto(pickle.dumps('^rply&:'+name), (addr, PORT))
            if addr not in users:
                users[addr] = data[7:]
                print '[{0}] {1} joined to chat room'.format(addr, users[addr])
        elif data[:6] == '^rply&':
            if addr not in users:
                users[addr] = data[7:]
        elif data[:6] == '^mssg&':
            if addr not in users:
                sd.sendto(pickle.dumps('^rqst&:'+name), (my_bcast, PORT))
                print '[{0}] Anonim: '.format(addr) + data[7:]
            else:
                print '[{0}] {1}: '.format(addr, users[addr]) + data[7:]
        elif data[:6] == '^exit&':
            if addr == my_ip:
                rd.close()
                return
            print "[{0}] {1} exit from the chat room".format(addr, users[addr])
            del users[addr]
Example #23
def test_deprecated_class():
    orig_A = TA.__bases__[0]

    # The only thing that should be different about the new class
    # is __doc__, __init__, __bases__ and __subclasshook__.
    # and __init_subclass__ for Python 3.6+.
    for x in dir(orig_A):
        if x not in ('__doc__', '__init__', '__bases__', '__dict__',
                     '__subclasshook__', '__init_subclass__'):
            assert getattr(TA, x) == getattr(orig_A, x)

    with catch_warnings(AstropyDeprecationWarning) as w:
        TA()

    assert len(w) == 1
    if TA.__doc__ is not None:
        assert 'function' not in TA.__doc__
        assert 'deprecated' in TA.__doc__
        assert 'function' not in TA.__init__.__doc__
        assert 'deprecated' in TA.__init__.__doc__

    # Make sure the object is picklable
    pickle.dumps(TA)

    with catch_warnings(NewDeprecationWarning) as w:
        TC()

    assert len(w) == 1
    assert w[0].category == NewDeprecationWarning
Example #24
 def submitFinalStage(self, stage, jobId):
     stage.pendingTasks = []
     partitionsToCompute = []
     if stage.isShuffle:
         partitionsToCompute = [i for i in range(stage.numPartitions) if not stage.outputLocs[i]]
     else:
         job = stage.resultOfJob
         partitionsToCompute = [i for i in range(job.numPartitions) if not job.finished[i]]
     print "Target partitions: " + str(partitionsToCompute)
     print "Start running stage" + str(stage.id) + " (" + str(stage.rdd.id) + ")"
     self.runningStages.add(stage)
     #print self.runningStages
     taskBinary = ''
     if stage.isShuffle:
         taskBinary = pickle.dumps([stage.rdd, stage.shuffleDep])
     else:
         taskBinary = pickle.dumps([stage.rdd, stage.resultOfJob.func])
     tasks = []
     if stage.isShuffle:
         for id in partitionsToCompute:
             if not stage.outputLocs[id]:
                 tasks.append(ShuffleTask(stage.id, taskBinary, id))
     else:
         job = stage.resultOfJob
         for id in partitionsToCompute:
             if not job.finished[id]:
                 p = job.partitions[id]  #same thing
                 tasks.append(ResultTask(stage.id, taskBinary, p, id))
     if len(tasks) > 0:
         stage.pendingTasks.extend(tasks)
         print "Task set of stage " + str(stage.id) + " (" + str(stage.rdd.id) + ") has been submitted"
         self.taskScheduler.submitTasks(TaskSet(tasks, stage.id, stage.jobId))
Example #25
    def test_pickling(self):
        # Create a template response. The context is
        # known to be unpicklable (e.g., a function).
        response = SimpleTemplateResponse('first/test.html', {
            'value': 123,
            'fn': datetime.now,
        })
        with self.assertRaises(ContentNotRenderedError):
            pickle.dumps(response)

        # But if we render the response, we can pickle it.
        response.render()
        pickled_response = pickle.dumps(response)
        unpickled_response = pickle.loads(pickled_response)

        self.assertEqual(unpickled_response.content, response.content)
        self.assertEqual(unpickled_response['content-type'], response['content-type'])
        self.assertEqual(unpickled_response.status_code, response.status_code)

        # ...and the unpickled response doesn't have the
        # template-related attributes, so it can't be re-rendered
        template_attrs = ('template_name', 'context_data', '_post_render_callbacks')
        for attr in template_attrs:
            self.assertFalse(hasattr(unpickled_response, attr))

        # ...and requesting any of those attributes raises an exception
        for attr in template_attrs:
            with self.assertRaises(AttributeError):
                getattr(unpickled_response, attr)
Example #26
    def test_copy_pickle(self):

        d = Deque('abc')

        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        s = pickle.dumps(d)
        e = pickle.loads(s)
        self.assertNotEqual(id(d), id(e))
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        d = Deque('abcde', maxlen=4)

        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))

        s = pickle.dumps(d)
        e = pickle.loads(s)
        self.assertNotEqual(id(d), id(e))
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
Example #27
File: server.py Project: OliverUv/CoVim
 def handle_GETNAME(self, name):
     # Handle duplicate name
     if userManager.has_user(name):
         d = {"packet_type": "message", "data": {"message_type": "error_newname_taken"}}
         self.transport.write(pickle.dumps(d))
         return
     # Handle spaces in name
     if not name_validate(name):
         d = {"packet_type": "message", "data": {"message_type": "error_newname_invalid"}}
         self.transport.write(pickle.dumps(d))
         return
     # Name is Valid, Add to Document
     self.user = User(name, self)
     userManager.add_user(self.user)
     self.state = "CHAT"
     d = {
         "packet_type": "message",
         "data": {"message_type": "connect_success", "name": name, "collaborators": userManager.all_users_to_json()},
     }
     if userManager.is_multi():
         d["data"]["buffer"] = self.factory.buff
     self.transport.write(pickle.dumps(d))
     print 'User "' + self.user.name + '" Connected'
     # Alert other Collaborators of new user
     d = {"packet_type": "message", "data": {"message_type": "user_connected", "user": self.user.to_json()}}
     self.user.broadcast_packet(d)
Example #28
    def test_pickle_parameters(self):
        # test that we can pickle a Parameters object
        p = Parameters()
        p.add('a', 10, True, 0, 100)
        p.add('b', 10, True, 0, 100, 'a * sin(1)')
        p.update_constraints()
        p._asteval.symtable['abc'] = '2 * 3.142'

        pkl = pickle.dumps(p, -1)
        q = pickle.loads(pkl)

        q.update_constraints()
        assert_(p == q)
        assert_(not p is q)

        # now test if the asteval machinery survived
        assert_(q._asteval.symtable['abc'] == '2 * 3.142')

        # check that unpickling of Parameters is not affected by expr that
        # refer to Parameter that are added later on. In the following
        # example var_0.expr refers to var_1, which is a Parameter later
        # on in the Parameters OrderedDict.
        p = Parameters()
        p.add('var_0', value=1)
        p.add('var_1', value=2)
        p['var_0'].expr = 'var_1'
        pkl = pickle.dumps(p)
        q = pickle.loads(pkl)
Example #29
 def test_pickle(self):
     vlist = _create_variable_list(3)
     ctuple = matrix_constraint(
         numpy.array([[0,0,0],[0,0,0]]),
         x=vlist)
     self.assertTrue((ctuple.lb == -numpy.inf).all())
     self.assertTrue((ctuple.ub == numpy.inf).all())
     self.assertTrue((ctuple.equality == False).all())
     self.assertEqual(ctuple.parent, None)
     ctuple_up = pickle.loads(
         pickle.dumps(ctuple))
     self.assertTrue((ctuple_up.lb == -numpy.inf).all())
     self.assertTrue((ctuple_up.ub == numpy.inf).all())
     self.assertTrue((ctuple_up.equality == False).all())
     self.assertEqual(ctuple_up.parent, None)
     b = block()
     b.ctuple = ctuple
     self.assertIs(ctuple.parent, b)
     bup = pickle.loads(
         pickle.dumps(b))
     ctuple_up = bup.ctuple
     self.assertTrue((ctuple_up.lb == -numpy.inf).all())
     self.assertTrue((ctuple_up.ub == numpy.inf).all())
     self.assertTrue((ctuple_up.equality == False).all())
     self.assertIs(ctuple_up.parent, bup)
Example #30
File: cache.py Project: davidhariri/api
        def func(*args, **kwargs):
            args_list = map(lambda k: (k, kwargs[k]), kwargs.keys())
            args_key = str(pickle.dumps(sorted(list(args_list))))

            if debug:
                print(">> CACHE:ARGS:", [kwargs])

            cache_key = "{namespace}:{hash}".format(
                namespace=namespace or function.__name__,
                hash=hashlib.md5(args_key.encode("utf-8")).hexdigest()
            )

            try:
                cache_result = cache.get(cache_key)
            except Exception:
                cache_result = None

            # If the result was None, we need to evaluate the function
            if not cache_result:
                if debug:
                    print(">> CACHE:MISS: ", cache_key)

                func_result = function(*args, **kwargs)

                # Save the result to the cache
                cache.setex(cache_key, expiry, pickle.dumps(func_result))
            else:
                if debug:
                    print(">> CACHE:HIT: ", cache_key)

                func_result = pickle.loads(cache_result)

            return func_result
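A self-contained, simplified sketch of the decorator this inner function belongs to (assumed structure; a plain dict stands in for the real cache backend and expiry handling is omitted).

import hashlib
import pickle

_store = {}

def cached(namespace=None):
    def decorator(function):
        def func(*args, **kwargs):
            args_key = str(pickle.dumps(sorted(kwargs.items())))
            cache_key = "{}:{}".format(namespace or function.__name__,
                                       hashlib.md5(args_key.encode("utf-8")).hexdigest())
            if cache_key in _store:
                return pickle.loads(_store[cache_key])  # cache hit
            result = function(*args, **kwargs)          # cache miss: compute
            _store[cache_key] = pickle.dumps(result)
            return result
        return func
    return decorator

@cached(namespace="demo")
def slow_square(*, n):
    return n * n

print(slow_square(n=4))  # computed, then stored as a pickle
print(slow_square(n=4))  # served from the cached pickle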
Example #31
 def testPickle(self):
     testvec = Vec2d(5, .3)
     testvec_str = pickle.dumps(testvec)
     loaded_vec = pickle.loads(testvec_str)
     self.assertEqual(testvec, loaded_vec)
Example #32
def test_pickling_exceptions():
    exc = System.Exception("test")
    dumped = pickle.dumps(exc)
    loaded = pickle.loads(dumped)

    assert exc.args == loaded.args
Example #33
def extract_features_from_model(base_model, model_name, feature_shape):

    label_encoder = None

    # loop over the data splits
    for split in (config.TRAIN, config.TEST, config.VAL):
        # grab all image paths in the current split
        p = os.path.sep.join([config.MODEL_DATASET_PATH, split])
        imagePaths = list(paths.list_images(p))

        # randomly shuffle the image paths and then extract the class labels
        # from the file paths.  It is more efficient to shuffle the classes
        # now, instead of during training.
        random.shuffle(imagePaths)
        # get the labels in the same order as the random image paths
        # path/dataset/training/nonfood/0_123.jpg
        # index of -2 references 'nonfood'
        labels = [imagePath.split(os.path.sep)[-2] for imagePath in imagePaths]

        # if the label encoder is None, create it
        if label_encoder is None:
            label_encoder = LabelEncoder()
            label_encoder.fit(labels)

        # open the output CSV file for writing
        Path(config.BASE_CSV_PATH.format(model_name)).mkdir(parents=True,
                                                            exist_ok=True)
        csvPath = os.path.sep.join(
            [config.BASE_CSV_PATH.format(model_name), f"{split}.csv"])
        csv = open(csvPath, "w")

        # loop over the images in batches that match the batchsize
        # for the model
        # will feed the image through the model in batches
        # to get the resulting vector
        for (b, i) in enumerate(range(0, len(imagePaths), config.BATCH_SIZE)):
            # extract the batch of images and labels, then initialize the
            # list of actual images that will be passed through the network
            # for feature extraction
            logger.info(
                f"Processing batch {b+1}/{int(np.ceil(len(imagePaths)/float(config.BATCH_SIZE)))}"
            )
            batchPaths = imagePaths[i:i + config.BATCH_SIZE]
            batchLabels = label_encoder.transform(labels[i:i +
                                                         config.BATCH_SIZE])
            batchImages = []

            for imagePath in batchPaths:
                # load the input image using the keras helper utility
                # while ensuring the image is resized to 224x224 pixels
                image = load_img(imagePath, target_size=(224, 224))
                image = img_to_array(image)

                # preprocess the image by:
                # 1 - expanding the dimensions, because the model expects an array of
                #     image arrays and `image` is currently a single image array
                image = np.expand_dims(image, axis=0)
                # 2 - subtracting the mean RGB pixel intensity of the ImageNet dataset
                image = imagenet_utils.preprocess_input(image)

                # add the image to the batch collection
                batchImages.append(image)

            # at this point we are ready to pass the images through the model network to
            # extract the features, which in this case is an array/vector of size 7*7*512

            # pass the images through the network and use the outputs as
            # our actual features, then reshape the features into a flattened volume
            batchImages = np.vstack(batchImages)
            # recall our base_model has the front FCN layer REMOVED so we are getting the output
            # of the convolutional network.
            features = base_model.predict(batchImages,
                                          batch_size=config.BATCH_SIZE)
            # reshape features into an array of array
            features = features.reshape((features.shape[0], feature_shape))

            # loop over the class labels and extracted features
            for (label, vec) in zip(batchLabels, features):
                # construct a row that consists of the class label and extracted features
                vec = ",".join([str(v) for v in vec])
                csv.write(f"{label},{vec}\n")

        # close file
        csv.close()

    Path(config.LE_PATH.format(model_name)).mkdir(parents=True, exist_ok=True)
    f = open(config.LE_FILE.format(model_name), "wb")
    f.write(pickle.dumps(label_encoder))
    f.close()
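A hedged counterpart sketch (assumed, not from the original script): reading the serialized LabelEncoder back, using the same config paths and model_name as above.

import pickle

with open(config.LE_FILE.format(model_name), "rb") as f:
    label_encoder = pickle.loads(f.read())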
Example #34
 def pack(value):
     return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
Example #35
def encryptObj(key, obj, path):
    byteObj = pickle.dumps(obj)
    encryptedObj = encrypt(key, byteObj, encode=False)
    with open(path, "wb") as file:
        pickle.dump(encryptedObj, file)
Example #36
 def test_pickle(self):
     pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1)
     self.assertEqual(pickle.loads(pickle.dumps(pj)), pj)
Example #37
def receiver(p, neighbours, capacity):
    election_ended = 0
    global turn, chosen_node, num_acks, parent, elid, is_sender
    while True:
        #print('MESSAGE QUEUE:')
        #print(rd.queue[0])

        print ('\nwaiting to receive message')
        data = sock.recv(1024)

        if turn != 1:
            turn = 1

        message = pickle.loads(data)

        message_type = message['TYPE']
        message_pid = message['PID']
        message_elid = message['ELID']
        message_sender = ('127.0.0.1', 10000 + int(message_pid))

        print('MESSAGE TYPE: %s' % message_type)

        if message_type == 'election':
            print ('RECEIVED ELECTION FROM %s' % message_pid)
            print ('CURRENT ELID: %s MESSAGE ELID: %s' % (elid, message_elid))
            if int(message_elid) > int(elid):
                print('ASSIGNING MY PARENT TO %s' % message_pid)
                message = { 'TYPE': 'election', 'PID': p, 'ELID': message_elid }
                parent = message_pid
                elid = message_elid
                for i in filter(lambda x: int(x) != parent, neighbours): # error
                    print ('SENDING TO %s, MY PARENT IS %s' % (i, parent))
                    sent = sock.sendto(pickle.dumps(message), ('127.0.0.1', 10000 + int(i)))
                    time.sleep(3)
                num_acks = 0
                is_sender = 0
            elif message_pid != parent and is_sender != 1:
                print('I HAVE A PARENT')
                message = { 'TYPE': 'ack', 'PID': p, 'ELID': elid, 'NODE': p, 'CAPACITY': capacity }
                sent = sock.sendto(pickle.dumps(message), message_sender)
        elif message_type == 'info' and election_ended != 1: # Election ended
            message_node = message['NODE']
            message_capacity = message['CAPACITY']
            message = { 'TYPE': 'info', 'PID': p, 'ELID': elid, 'NODE': message_node, 'CAPACITY': message_capacity}
            print ('THE NODE %s WITH CAPACITY %s HAS BEEN ELECTED' % (message_node, message_capacity))
            for i in filter(lambda x: int(x) != int(message_pid), neighbours):
                print ('SENDING TO %s, MY INFORMANT IS %s' % (i, message_pid))
                sent = sock.sendto(pickle.dumps(message), ('127.0.0.1', 10000 + int(i)))
            chosen_node['PID'] = message_node
            chosen_node['CAPACITY'] = message_capacity
            election_ended = 1
        elif message_type == 'ack' and int(message_elid) == int(elid):
            print ('RECEIVED ACK FROM %s' % message_pid)
            num_acks = num_acks + 1
            message_node = message['NODE']
            message_capacity = message['CAPACITY']
            if int(message_capacity) > int(chosen_node['CAPACITY']):
                chosen_node['PID'] = message_node
                chosen_node['CAPACITY'] = message_capacity

        if num_acks == len(neighbours) - 1 and election_ended != 1 and is_sender != 1:
            print ('RECEIVED ALL ACKS FROM NEIGHBOURS')
            print ('SENDING TO PARENT NODE %s' % parent)
            message = { 'TYPE': 'ack', 'PID': p, 'ELID': elid, 'NODE': chosen_node['PID'], 'CAPACITY': chosen_node['CAPACITY']}
            sent = sock.sendto(pickle.dumps(message), ('127.0.0.1', 10000 + int(parent)))


        time.sleep(random.random() * 5)
Example #38
 def _encode_items(self, a, b):
     return sqlite.Binary(pickle.dumps(a, -1)), sqlite.Binary(pickle.dumps(b, -1))
Example #39
def cluster_distributions(output_file, tiff_cluster, tiff_built,
                          tiff_night_light, tiff_population, cities_csv):
    #LOADING OF THE CLUSTERS MAP
    tiff_file = gdal.Open(tiff_cluster)
    arr_img = tiff_file.ReadAsArray()
    arr_img = arr_img.astype(np.int)
    tiff_file = None  #close it
    band = None  #close it

    #DEFINITION OF THE DIFFERENT COORDINATES OF CLUSTERS

    arr_img = np.where(arr_img == -1, 0, arr_img)
    arr_img = np.where(arr_img == -999, 1, arr_img)
    clusters_list = cm.get_all_urban_cluster(arr_img)

    #LOADING OF THE DIFFERENT OUTPUTS
    tiff_file = gdal.Open(tiff_built)
    arr_built = tiff_file.ReadAsArray()
    arr_built = np.where(arr_built == -999, 0, arr_built)

    tiff_file = gdal.Open(tiff_night_light)
    arr_night_light = tiff_file.ReadAsArray()
    arr_night_light = np.where(arr_night_light == -999, 0, arr_night_light)

    tiff_file = gdal.Open(tiff_population)
    arr_population = tiff_file.ReadAsArray()
    tiff_file = None
    arr_population = np.where(arr_population == -999, 0, arr_population)

    #CREATION OF DISTRIBUTIONS FOR EACH CLUSTERS
    clusters = []
    for c in clusters_list:
        if len(c) > 1:
            cluster = {}
            cluster["coordinates"] = c
            built_density = []
            night_light_density = []
            population_density = []
            for x, y in c:
                built_density.append(arr_built[x, y])
                night_light_density.append(arr_night_light[x, y])
                population_density.append(arr_population[x, y])
            cluster["built density"] = built_density
            cluster["night light density"] = night_light_density
            cluster["population density"] = population_density
            clusters.append(cluster)

    #NAMING OF CLUSTERS
    #Load a geo matrix from a tiff

    tiff_file = gdal.Open(tiff_cluster)
    geo_matrix = tiff_file.GetGeoTransform()

    # open the csv that stores the different cities and their coordinates
    # Note that the csv should be sorted in decreasing order
    df = pd.read_csv(cities_csv, header=0)

    for c in clusters:
        start = time.time()
        for _, row in df.iterrows():
            city_coordinates = set()
            city_coordinates.add(
                world_to_pixel(geo_matrix, row["long"], row["lat"]))
            if len(c["coordinates"].intersection(city_coordinates)) > 0:
                clusters[clusters.index(c)]["name"] = row["com_nom"]
                clusters[clusters.index(c)]["country"] = row["country"]
                print("OK")
                break
        # some clusters do not match any city
        if "name" not in c:
            clusters[clusters.index(c)]["name"] = "NO WHERE"
            clusters[clusters.index(c)]["country"] = "NO WHERE"
            print("NO WHERE")
        end = time.time()
        print(end - start)
    # output_file should have extension .pkl
    with open(output_file, "wb") as f:
        f.write(pickle.dumps(clusters))

    return clusters
Example #40
 def save(self):
     "保存"
     with open('db.pkl', 'wb') as f:
         f.write(pickle.dumps(self.memo_list))
         print("文件保存成功")
Example #41
 def test_record_not_pickleable(self):
     r = Record(R_A, (42, ))
     with self.assertRaises(Exception):
         pickle.dumps(r)
Example #42
def trainKeras(En, A, Cl, A_test, Cl_test, Root):
    import tensorflow as tf
    
    # Use this to restrict GPU memory allocation in TF
    opts = tf.GPUOptions(per_process_gpu_memory_fraction=sysDef.fractionGPUmemory)
    conf = tf.ConfigProto(gpu_options=opts)
    #conf.gpu_options.allow_growth = True
    
    if kerasDef.useTFKeras:
        import tensorflow.keras as keras  #tf.keras
        tf.Session(config=conf)
    else:
        import keras   # pure keras
        from keras.backend.tensorflow_backend import set_session
        set_session(tf.Session(config=conf))
    
    from sklearn import preprocessing
    from tensorflow.contrib.learn.python.learn import monitors as monitor_lib
    
    tb_directory = "keras_" + str(len(kerasDef.hidden_layers))+"HL_"+str(kerasDef.hidden_layers[0])
    model_directory = "."
    if kerasDef.regressor:
        model_name = model_directory+"/keras_regressor_"+str(len(kerasDef.hidden_layers))+"HL_"+str(kerasDef.hidden_layers[0])+".hd5"
    else:
        model_name = model_directory+"/keras_"+str(len(kerasDef.hidden_layers))+"HL_"+str(kerasDef.hidden_layers[0])+".hd5"
    model_le = model_directory+"/keras_model_le.pkl"
    
    if kerasDef.alwaysRetrain == False:
        print(" Training model saved in: ", model_name, "\n")
    else:
        kerasDef.alwaysImprove = False
        print(" Training model not saved\n")
    
    #**********************************************
    ''' Initialize Estimator and training data '''
    #**********************************************
    print(' Preprocessing data and classes for Keras\n')

    totA = np.vstack((A, A_test))
    totCl = np.append(Cl, Cl_test)

    
    if kerasDef.regressor:
        Cl2 = np.copy(Cl)
        Cl2_test = np.copy(Cl_test)
        le = None
    else:

        numTotClasses = np.unique(totCl).size
        le = preprocessing.LabelEncoder()
        totCl2 = le.fit_transform(totCl)
        Cl2 = le.transform(Cl)
        Cl2_test = le.transform(Cl_test)

        totCl2 = keras.utils.to_categorical(totCl2, num_classes=np.unique(totCl).size)
        Cl2 = keras.utils.to_categorical(Cl2, num_classes=np.unique(totCl).size+1)
        Cl2_test = keras.utils.to_categorical(Cl2_test, num_classes=np.unique(totCl).size+1)
        print(" Label Encoder saved in:", model_le)
        with open(model_le, 'ab') as f:
            f.write(pickle.dumps(le))
    
    if kerasDef.fullBatch == True:
        batch_size = A.shape[0]
    else:
        batch_size = kerasDef.batchSize

    printParamKeras(A)

    if kerasDef.alwaysImprove == True or os.path.exists(model_name) is False:
        model = keras.models.Sequential()
        for numLayers in kerasDef.hidden_layers:
            model.add(keras.layers.Dense(numLayers,
                    activation = kerasDef.activation_function,
                    input_dim=A.shape[1],
                    kernel_regularizer=keras.regularizers.l2(kerasDef.l2_reg_strength)))
            model.add(keras.layers.Dropout(kerasDef.dropout_perc))

        if kerasDef.regressor:
            model.add(keras.layers.Dense(1))
            model.compile(loss='mse',
                optimizer=kerasDef.optimizer,
                metrics=['mae'])
        else:
            model.add(keras.layers.Dense(np.unique(totCl).size+1, activation = 'softmax'))
            model.compile(loss='categorical_crossentropy',
              optimizer=kerasDef.optimizer,
              metrics=['accuracy'])

        tbLog = keras.callbacks.TensorBoard(log_dir=tb_directory, histogram_freq=kerasDef.tbHistogramFreq,
                batch_size=batch_size, write_graph=True, write_grads=True, write_images=True)
        #tbLog.set_model(model)
        tbLogs = [tbLog]
        log = model.fit(A, Cl2,
            epochs=kerasDef.trainingSteps,
            batch_size=batch_size,
            callbacks = tbLogs,
            verbose = 2,
            validation_data=(A_test, Cl2_test))

        loss = np.asarray(log.history['loss'])
        val_loss = np.asarray(log.history['val_loss'])

        if kerasDef.regressor:
            accuracy = None
            val_acc = None
        else:
            accuracy = np.asarray(log.history['acc'])
            val_acc = np.asarray(log.history['val_acc'])

        model.save(model_name)

        if kerasDef.plotModel == True:
            from keras.utils import plot_model
            keras.utils.plot_model(model, to_file=model_directory+'/keras_MLP_model.png', show_shapes=True)
            
            import matplotlib.pyplot as plt
            plt.figure(tight_layout=True)
            plotInd = int(len(kerasDef.hidden_layers))*100+11
            visibleX = True
            for layer in model.layers:
                try:
                    w_layer = layer.get_weights()[0]
                    ax = plt.subplot(plotInd)
                    newX = np.arange(En[0], En[-1], (En[-1]-En[0])/w_layer.shape[0])
                    plt.plot(En, np.interp(En, newX, w_layer[:,0]), label=layer.get_config()['name'])
                    plt.legend(loc='upper right')
                    plt.setp(ax.get_xticklabels(), visible=visibleX)
                    visibleX = False
                    plotInd +=1
                except:
                    pass

            plt.xlabel('Raman shift [1/cm]')
            plt.legend(loc='upper right')
            plt.savefig('keras_weights_MLP' + '.png', dpi = 160, format = 'png')  # Save plot

        printModelKeras(model)

        print("\n  Number of spectra = ",A.shape[0])
        print("  Number of points in each spectra = ", A.shape[1])
        if kerasDef.regressor == False:
            print("  Number unique classes (training): ", np.unique(Cl).size)
            print("  Number unique classes (validation):", np.unique(Cl_test).size)
            print("  Number unique classes (total): ", np.unique(totCl).size)

        printParamKeras(A)
        printTrainSummary(accuracy, loss, val_acc, val_loss)
        
    else:
        print(" Retreaving training model from: ", model_name,"\n")
        model = keras.models.load_model(model_name)
        printModelKeras(model)
        printParamKeras(A)
    
    score = model.evaluate(A_test, Cl2_test, batch_size=batch_size)
    printEvalSummary(model_name, score)
    return model, le
Example #43
 def _encode_item(self, item):
     return sqlite.Binary(pickle.dumps(item, -1))