def __eq__(self, obj):
    """Instances are equal iff they share a class and the same state.

    Uses ``__getstate__`` when available; otherwise falls back to comparing
    the pickled byte representations.
    """
    if self.__class__ is not obj.__class__:
        return False
    if hasattr(self, '__getstate__'):
        # pylint: disable=no-member
        return self.__getstate__() == obj.__getstate__()
    return _dumps(obj) == _dumps(self)
def post_proc(tree, captured_registry, gen_sym, **kw):
    """Prepend code to *tree* that unpickles all captured values.

    Generates ``from pickle import loads as <gensym>`` plus an assignment that
    restores every captured value into its symbol, then fixes AST contexts and
    locations. Returns *tree* unchanged when nothing was captured.

    Fix: use the public ``pickle.loads``/``pickle.dumps`` instead of the
    private pure-Python ``_loads``/``_dumps`` internals.
    """
    if not captured_registry:
        return tree
    unpickle_name = gen_sym("unpickled")
    # Quasiquote the import statement, then rename its alias to the gensym.
    with q as pickle_import:
        from pickle import loads as x  # noqa: F401
    pickle_import[0].names[0].asname = unpickle_name
    import pickle
    syms = [ast.Name(id=sym) for val, sym in captured_registry]
    vals = [val for val, sym in captured_registry]
    # Embed the pickled values as a literal; unpickled at module import time.
    with q as stored:
        ast_list[syms] = name[unpickle_name](u[pickle.dumps(vals)])
    from .cleanup import ast_ctx_fixer
    stored = ast_ctx_fixer.recurse(stored)
    tree.body = (list(map(ast.fix_missing_locations, pickle_import + stored))
                 + tree.body)
    return tree
def toFile(self, filename):
    """
    Save the suffix array instance including all features attached in
    filename. Accept any filename following the _open conventions, for
    example if it ends with .gz the file created will be a compressed
    GZip file.
    """
    # NOTE: Python 2 code (`print >> file` syntax below).
    start = _time()
    # NOTE(review): pickle output is binary; "wb" would be safer than "w" --
    # confirm what _open does with the mode.
    fd = _open(filename, "w")
    # Core state first; one (values, default) pair per feature is appended.
    savedData = [self.string, self.unit, self.voc, self.vocSize, self.SA, self.features]
    for featureName in self.features:
        featureValues = getattr(self, "_%s_values" % featureName)
        featureDefault = getattr(self, "%s_default" % featureName)
        savedData.append((featureValues, featureDefault))
    fd.write(_dumps(savedData, _HIGHEST_PROTOCOL))
    fd.flush()
    try:
        self.sizeOfSavedFile = getsize(fd.name)
    except OSError:  # if stdout is used
        self.sizeOfSavedFile = "-1"
    self.toFileTime = _time() - start
    if _trace: print >> _stderr, "toFileTime %.2fs" % self.toFileTime
    if _trace: print >> _stderr, "sizeOfSavedFile %sb" % self.sizeOfSavedFile
    fd.close()
def save(self, name):
    """
    Save the current labeled image to persistent storage.

    Serializes the image bytes plus every rule (expression, shape, points,
    action, ordinal position) into one pickled dict and inserts it into the
    LabeledImage table together with *name* and the owner.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    self.name = name
    pickle_dict = {'ImBytes': self.Image, 'Rules': []}
    for idx, rule in enumerate(self.Rules):
        m_expr, shape, action = rule
        rule_dict = {
            'expr_type': m_expr._type,
            'expr': m_expr.expr,
            'shape_type': shape._type,
            'radius': shape.radius,
            # Flatten each point into a row-like dict, keeping its order.
            'points': [
                {'x1': point[0], 'y1': point[1], 'position': idp}
                for idp, point in enumerate(shape.points)
            ],
            'action': action,
            'position': idx,
        }
        pickle_dict['Rules'].append(rule_dict)
    obj = pickle.dumps(pickle_dict, protocol=pickle.HIGHEST_PROTOCOL)
    LabeledImage.db_cursor.execute(
        'INSERT INTO LabeledImage Values(NULL, ?, ?, ?)',
        (obj, name, self.owner))
def toFile(self, filename):
    """
    Save the suffix array instance including all features attached in
    filename. Accept any filename following the _open conventions, for
    example if it ends with .gz the file created will be a compressed
    GZip file.
    """
    # NOTE: Python 2 code (`print >> file` syntax below).
    start = _time()
    # NOTE(review): pickle output is binary; "wb" would be safer than "w" --
    # confirm what _open does with the mode.
    fd = _open(filename, "w")
    # Core state first; one (values, default) pair per feature is appended.
    savedData = [
        self.string, self.unit, self.voc, self.vocSize, self.SA, self.features
    ]
    for featureName in self.features:
        featureValues = getattr(self, "_%s_values" % featureName)
        featureDefault = getattr(self, "%s_default" % featureName)
        savedData.append((featureValues, featureDefault))
    fd.write(_dumps(savedData, _HIGHEST_PROTOCOL))
    fd.flush()
    try:
        self.sizeOfSavedFile = getsize(fd.name)
    except OSError:  # if stdout is used
        self.sizeOfSavedFile = "-1"
    self.toFileTime = _time() - start
    if _trace: print >> _stderr, "toFileTime %.2fs" % self.toFileTime
    if _trace: print >> _stderr, "sizeOfSavedFile %sb" % self.sizeOfSavedFile
    fd.close()
def compat_dumps(obj, protocol=None, fix_imports=True, buffer_callback=None):
    """Pickle *obj* at the module's DEFAULT_PROTOCOL.

    *protocol* is accepted for signature compatibility with ``pickle.dumps``
    but deliberately ignored: the module-level DEFAULT_PROTOCOL is always
    used so that output stays compatible across interpreters.

    Fix: call the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    return pickle.dumps(
        obj,
        protocol=DEFAULT_PROTOCOL,
        fix_imports=fix_imports,
        buffer_callback=buffer_callback,
    )
def dumps(self, obj, protocol=None, *, fix_imports=True):
    """Pickle *obj* with the pickler selected by ``self._pickle_module``.

    ``"P"`` dispatches to the pure-Python pickler (``pickle._dumps``),
    ``"C"`` to the C-accelerated ``pickle.dumps``. When *protocol* is None,
    ``self._pickle_protocol`` is used.
    """
    chosen = self._pickle_protocol if protocol is None else protocol
    impl = {"P": pickle._dumps, "C": pickle.dumps}.get(self._pickle_module)
    if impl is None:
        raise ValueError("Invalid pickle module")
    return impl(obj, protocol=chosen, fix_imports=fix_imports)
def push_params_redis(model):
    """Pickle every parameter tensor of *model* and store them in Redis.

    Keys are the parameter's ordinal index (0, 1, ...); values are the
    pickled numpy arrays. The Redis write happens in one batched async call.

    Fixes: use the public ``pc.dumps`` (``pc._dumps`` is a private
    pure-Python detail) and ``enumerate`` instead of a manual counter.
    """
    keys = []
    values = []
    for i, param in enumerate(model.parameters()):
        param_data = param.data.numpy()
        keys.append(i)
        values.append(pc.dumps(param_data, protocol=pc.HIGHEST_PROTOCOL))
    asyncio.get_event_loop().run_until_complete(
        multi_set_key_redis(keys, values))
def Comunicate(s, addr):  # Adds new players to Information and removes them when they disconnect
    """Handle one client connection for the game server.

    Registers the player in the global ``Information``/``All`` tables, then
    loops sending the player's state and receiving updates until the global
    ``done`` flag is set or the socket fails, at which point the slot is
    marked "disconnected".
    """
    global Information
    global done
    dis = False
    name = s.recv(1024).decode()
    l = len(Information)
    Information.append([None, None])
    T = random.randint(0, 2)
    All.append([l, T, name, 0])
    while (not done) and (not dis):
        try:
            # Public pickle API (pickle._dumps is private).
            # SECURITY: pickle.loads on network data is unsafe if clients
            # are untrusted -- flagged for review.
            s.send(pickle.dumps(Information[l][0]))
            Information[l][1] = pickle.loads(s.recv(1024))
        except Exception:  # narrowed from bare `except:` so Ctrl-C still works
            s.close()
            Information[l] = ["disconnected", "disconnected"]
            dis = True
def get_users_audio(self, session, vk_page):
    """Return the (reversed) artist list of a VK user's audios, with caching.

    On a cache hit, unpickles and returns the stored list. Otherwise fetches
    the audios, appends them to the CSV dataset, caches the artist list under
    the page id, and returns it.

    Fix: use the public ``pickle.dumps`` instead of the private
    ``pickle._dumps``.
    """
    cached = cache.get(str(vk_page))
    if cached:
        logger.info('return from cache')
        # Safe only because this process wrote the cache entry itself.
        return pickle.loads(cached)
    vkaudio = VkAudio(session)
    all_audios = vkaudio.get(owner_id=vk_page)
    logger.info('got {} audios'.format(len(all_audios)))
    if all_audios:
        all_audios = pd.DataFrame(all_audios)
        all_audios['user_id'] = vk_page
        # Append-only dataset dump; no header so repeated appends stay clean.
        all_audios[['user_id', 'title', 'artist']].to_csv(
            DATASET_PATH, mode='a', index=None, header=None)
        all_audios = list(all_audios['artist'])[::-1]
    cache.setex(name=str(vk_page), value=pickle.dumps(all_audios),
                time=CACHE_LIFETIME)
    return all_audios
def get_voter_pk(self):
    """Return this voter's public key pickled to bytes.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    return pickle.dumps(self.voterpubkey)
def sk_to_data(privatekey, pin):
    """Encode *privatekey* and *pin* into one ``'****'``-joined string.

    The key is pickled, then rendered as the text of its bytes repr with the
    ``b'...'`` wrapper stripped, so the result is a plain string.

    Fix: use the public ``pk.dumps`` instead of the private ``pk._dumps``.
    """
    key = pk.dumps(privatekey)
    key = str(key)[2:-1]  # drop the leading b' and trailing ' of the repr
    data = '****'.join([str(pin), key])
    return data
def _obj_to_pkl_string(o):
    """Pickle *o* with the module-wide protocol ``_hp`` and return the bytes."""
    pickled = _dumps(o, protocol=_hp)
    return pickled
def wrap(message):
    """Serialize *message* to pickle bytes for transmission.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    data = pickle.dumps(message)
    return data
def assert_dict_of_unpickled_is_the_same(original_obj):
    """Round-trip *original_obj* through pickle and assert dict equality.

    Test helper: raises AssertionError when the unpickled copy, viewed as a
    dict, differs from the original.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    pickled = pickle.dumps(original_obj)
    unpickled = pickle.loads(pickled)
    assert dict(unpickled) == dict(original_obj)
def serialize_data(data):
    """Return *data* serialized to pickle bytes.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    return pickle.dumps(data)
def compat_dumps(obj, protocol=None, fix_imports=True, **kwargs):
    """Pickle *obj* at the module's DEFAULT_PROTOCOL.

    *protocol* is accepted for signature compatibility with ``pickle.dumps``
    but deliberately ignored: DEFAULT_PROTOCOL is always used. Extra keyword
    arguments are forwarded unchanged.

    Fix: call the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    return pickle.dumps(obj, protocol=DEFAULT_PROTOCOL,
                        fix_imports=fix_imports, **kwargs)
def start(self):
    """Host a two-player TicTacToe match over TCP.

    Accepts exactly two clients, picks a random starting player, then loops:
    send the board, request a move, validate it through the game object, and
    relay the outcome until the game ends in a win or a draw.

    Fixes: public ``pickle.dumps`` instead of the private ``pickle._dumps``;
    duplicated send logic factored into a helper; the ``input`` builtin is no
    longer shadowed; dead commented-out code removed.
    """

    def send_msg(sock, msg_type, payload):
        # Every packet is one pickled message.message object.
        sock.send(pickle.dumps(message.message(msg_type, payload)))

    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind((host, port))
    server.listen(backlog)
    conns = [server]  # conns[1] and conns[2] become the two player sockets
    var = 1
    for x in range(2):
        inputready, outputready, exceptready = select.select(conns, [], [])
        for s in inputready:
            if s == server:
                client, address = server.accept()
                conns.append(client)
                print('new client added%s' % str(address))
                conns[var].send(str.encode("I see YOU"))
                var += 1
    gameObject = TicTacToe.TicTacToe()
    var = random.randint(0, 1)  # random starting player
    print(var)
    while 1:
        mover = conns[var % 2 + 1]
        send_msg(mover, enums.typeOfMessage.gameInformationDoNotExpectResponse,
                 gameObject.getBoard())
        send_msg(mover, enums.typeOfMessage.informationRequireResponse,
                 "next move ?")
        try:
            # SECURITY: pickle.loads on network data is unsafe if clients
            # are untrusted -- flagged for review.
            first, second = map(
                int, pickle.loads(mover.recv(10240)).getData().split())
        except Exception:
            send_msg(mover, enums.typeOfMessage.informationRequireResponse,
                     "write proper position")
            continue
        anser = gameObject.nextMove([first, second])
        send_msg(mover, enums.typeOfMessage.gameInformationDoNotExpectResponse,
                 gameObject.getBoard())
        var += 1
        other = conns[var % 2 + 1]  # after the increment: the waiting player
        if enums.game_state.game_is_not_finished == anser:
            continue
        if enums.game_state.draw == anser:
            send_msg(other, enums.typeOfMessage.finalMessageFromTheGame,
                     enums.game_state.draw)
            send_msg(mover, enums.typeOfMessage.finalMessageFromTheGame,
                     enums.game_state.draw)
            print("draw")
            break
        elif anser in {enums.game_state.o_won, enums.game_state.x_won}:
            send_msg(other,
                     enums.typeOfMessage.gameInformationDoNotExpectResponse,
                     gameObject.getBoard())
            send_msg(mover,
                     enums.typeOfMessage.gameInformationDoNotExpectResponse,
                     gameObject.getBoard())
            send_msg(mover, enums.typeOfMessage.finalMessageFromTheGame, anser)
            send_msg(other, enums.typeOfMessage.finalMessageFromTheGame, anser)
            print(anser.value)
            break
        else:
            # Invalid move: undo the turn change and tell the same player.
            var -= 1
            send_msg(conns[var % 2 + 1],
                     enums.typeOfMessage.gameInformationDoNotExpectResponse,
                     anser)
def test_pickle(self):
    """The logger must survive a pickle round-trip with its level intact.

    Fix: use the public ``pickle.dumps``/``pickle.loads`` instead of the
    private pure-Python ``pickle._dumps``/``pickle._loads``.
    """
    logger = TestLogger(log_level=logging.ERROR)
    logger = pickle.loads(pickle.dumps(logger))
    self.assertIsInstance(logger._log, logging.Logger)
    self.assertEqual(logger._log.level, logging.ERROR)
# coding:utf-8
__author__ = 'SuHan'
'''
Serialization writes in-memory data out to disk; the reverse is
deserialization. In Python this is called "pickling". Data can only be
written to disk after it has been serialized.
'''
import pickle

d = dict(name='suhan', age=20, score=88)
# Public API: pickle.dumps (pickle._dumps is a private pure-Python detail).
# `with` guarantees the file is closed even on error.
with open('112.txt', 'wb') as f:
    f.write(pickle.dumps(d))
with open('112.txt', 'r+b') as f:
    d = pickle.load(f)
print(d)
# The pickled file holds binary output, so after writing it we deserialize
# the contents to read the data back.
'''
Serializing Python data to JSON: the JSON format can be read by every
language and can also be stored directly on disk.
'''
import json

d = dict(name='suhan01', age=20, score=88)
print(d)
print(json.dumps(d))
def update_event(self, inp=-1):
    """Node update: pickle input 0 using input 1 as the protocol, and emit
    the resulting bytes on output 0.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    self.set_output_val(0, pickle.dumps(self.input(0), self.input(1)))
def request(self, request):
    """Send *request* over UDP to the local peer listening on ``self.port``.

    The payload is the pickled ``(self.src, request)`` pair, addressed to
    localhost.

    Fix: use the public ``pickle.dumps`` instead of the private pure-Python
    ``pickle._dumps``.
    """
    logging.debug('Sending %s to %s', request, self.port)
    payload = pickle.dumps((self.src, request))
    self.socket.sendto(payload, ('localhost', self.port))
import pickle

# Round-trip demo: serialize a dict to bytes, then restore it.
d = dict(name='bob', age=18, score=90)
print(d)
data = pickle.dumps(d)  # public API (pickle._dumps is a private detail)
print(data)
reborn = pickle.loads(data)
print(reborn)
keys = {}
# Generate an RSA key pair via the project's crypto helper.
keys['private'], keys['public'] = enc.rsakeys()

# print(keys['public'])

# Serialize the public key with the public pickle API
# (pk._dumps is a private pure-Python detail).
keyobj = pk.dumps(keys['public'])
print(keyobj)

ls = [
    b'C6yOrzSFsfy4bQ172sS2PRmpTmGa8euo+xg',
    b'rTDAVInfyDn+WO72sS2PRmpTmGykx74Kz/HC4='
]
# Store the pickled key's textual form: bytes repr minus the b'...' wrapper.
ls.append(str(keyobj)[2:-1])
jsondict = {'pubkey': ls[2], 'data': ls[0], 'key': ls[1]}
#
# NOTE(review): json.dumps cannot serialize bytes values ('data'/'key' here
# are bytes), so the next line raises TypeError as written -- confirm whether
# ls[0]/ls[1] should be decoded to str first.
jd = js.dumps(jsondict)
ds = js.loads(jd)
# Recover the original pickled bytes from the JSON round-tripped string.
vpubkey = bytes(ds['pubkey'], 'utf-8')
vpubkeyORIGINAL = vpubkey.decode('unicode-escape').encode('ISO-8859-1')