def test_decimal(self):
    # Default behavior: Decimal is preserved
    obj = decimal.Decimal(0.5)
    as_json = jsonpickle.dumps(obj)
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
    self.assertEqual(obj, clone)

    # Custom behavior: we want to use simplejson's Decimal support.
    jsonpickle.set_encoder_options('simplejson', use_decimal=True, sort_keys=True)
    jsonpickle.set_decoder_options('simplejson', use_decimal=True)

    # use_decimal mode allows Decimal objects to pass through to simplejson.
    # The end result is we get a simple '0.5' value as our json string.
    as_json = jsonpickle.dumps(obj, unpicklable=True, use_decimal=True)
    self.assertEqual(as_json, '0.5')
    # But when loading we get back a Decimal.
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
    # side-effect: floats become decimals too!
    obj = 0.5
    as_json = jsonpickle.dumps(obj)
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
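# The set_encoder_options/set_decoder_options calls above are process-global and
# leak into later tests (as the float side-effect shows). A minimal cleanup
# sketch, assuming the defaults (use_decimal=False, sort_keys=False) are what
# the rest of the suite expects:
def tearDown(self):
    jsonpickle.set_encoder_options('simplejson', use_decimal=False, sort_keys=False)
    jsonpickle.set_decoder_options('simplejson', use_decimal=False)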
def initFromJSON(self, jsSrc):
    if self.source == "FILE":
        self.ldd = jsonpickle.loads(jsSrc.read())
    if self.source == "NETWORK":
        self.ldd = jsonpickle.loads(jsSrc)
    self.newCal = Calendar(self.ldd["myUID"])
    evtList = [genCalEvt(cvt) for cvt in self.ldd["caltxts"]]
    for evt in evtList:
        self.newCal.addEntry(evt)
    return self.newCal
def _main():
    arguments_json_path = sys.argv[1]
    with open(arguments_json_path) as f:
        arguments = jsonpickle.loads(f.read())
    # arguments_json_path is a temporary file created by the parent process,
    # so we remove it here
    os.remove(arguments_json_path)

    task_id = arguments['task_id']
    port = arguments['port']
    messenger = _Messenger(task_id=task_id, port=port)
    messenger.started()

    operation_mapping = arguments['operation_mapping']
    operation_inputs = arguments['operation_inputs']
    context_dict = arguments['context']

    # This is required for the instrumentation to work properly.
    # See docstring of `remove_mutable_association_listener` for further details
    storage_type.remove_mutable_association_listener()

    with instrumentation.track_changes() as instrument:
        try:
            ctx = serialization.operation_context_from_dict(context_dict)
            _patch_session(ctx=ctx, messenger=messenger, instrument=instrument)
            task_func = imports.load_attribute(operation_mapping)
            aria.install_aria_extensions()
            for decorate in process_executor.decorate():
                task_func = decorate(task_func)
            task_func(ctx=ctx, **operation_inputs)
            messenger.succeeded(tracked_changes=instrument.tracked_changes)
        except BaseException as e:
            messenger.failed(exception=e, tracked_changes=instrument.tracked_changes)
def test_sqlalchemy_with_dynamic_table(self):
    """Test creating a table dynamically, per #180"""
    if self.should_skip:
        return self.skip('sqlalchemy is not installed')
    meta = sqa.MetaData()
    expect = sqa.Table(
        'test', meta,
        sqa.Column('id', sqa.Integer()),
        sqa.Column('text', sqa.Text()))
    jsonstr = jsonpickle.dumps(expect)
    actual = jsonpickle.loads(jsonstr)
    self.assertEqual(expect.__class__, actual.__class__)
    self.assertEqual(expect.name, actual.name)
    # These must be unique instances
    self.assertNotEqual(expect.metadata, actual.metadata)
    # Column names must exactly match
    self.assertEqual(sorted(expect.columns.keys()),
                     sorted(actual.columns.keys()))
    # As should the types
    self.assertEqual(expect.c.id.name, actual.c.id.name)
    self.assertEqual(expect.c.id.type.__class__, actual.c.id.type.__class__)
    self.assertEqual(expect.c.text.name, actual.c.text.name)
    self.assertEqual(expect.c.text.type.__class__, actual.c.text.type.__class__)
def get(self, request):
    # 1. Insert the Order row
    # Get the request parameters
    import uuid, datetime
    # print(request.GET.get('payway'))
    # print(request.GET.get('address', ''))
    data = {
        "out_trade_num": uuid.uuid4().hex,
        "order_num": datetime.datetime.today().strftime('%Y%m%d%H%M%S'),
        "payway": request.GET.get('payway'),
        "address": Address.objects.get(id=request.GET.get('address', '')),
        "user": request.session.get('user', ''),
    }
    orderObj = Order.objects.create(**data)
    # 2. Insert the OrderItem rows
    cartitems = jsonpickle.loads(request.GET.get('cartitems'))
    orderItemList = [OrderItem.objects.create(order=orderObj, **item)
                     for item in cartitems if item]
    totalPrice = request.GET.get('totalPrice')[1:]
    # Build the QR-code payment request parameters
    params = alipay.direct_pay(subject='京东超市',
                               out_trade_no=orderObj.out_trade_num,
                               total_amount=str(totalPrice))
    # Build the QR-code payment request URL
    url = alipay.gateway + "?" + params
    return redirect(url)
def mainfunc(request):
    user = request.session.get('user', '')
    if user:
        user = jsonpickle.loads(user)
    return {"info": user}

def lineReceived(self, line):
    try:
        line = line.decode()
        log.debug(f"render | out | {line}")
        response = jsonpickle.loads(line)
    except Exception as e:
        response = {}

    #: Special case for startup
    response_id = response.get('id')
    if response_id == 'window_id':
        self.window_id = response['result']
        self.restarts = 0  # Clear the restart count
    elif response_id == 'render_error':
        self.errors = response['error']['message']
    elif response_id == 'render_ok':
        self.errors = ""
    elif response_id == 'capture_output':
        # Script output, capture it
        self.output = response['result'].split("\n")
    elif response_id is not None:
        # Look up the deferred object stored for this id and invoke its
        # callback or errback based on the result
        d = self._responses.get(response_id)
        if d is not None:
            del self._responses[response_id]
            error = response.get('error')
            if error is not None:
                d.errback(error)
            else:
                d.callback(response.get('result'))
    else:
        # Append to output
        self.output.append(line)
def test_timedelta(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    td = datetime.timedelta(-1, 68400)
    serialized = jsonpickle.dumps(td)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored, td)

def test_nested_objects(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    o = Object(99)
    serialized = jsonpickle.dumps(o)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored.offset, datetime.timedelta(99))

def test_FixedOffsetSerializable(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    serialized = jsonpickle.dumps(fo)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(vars(restored), vars(fo))

def set_state(self, state):
    state = jsonpickle.loads(state.decode('utf-8'))
    for key, value in six.iteritems(state):
        try:
            self._objects[key].set_state(value)
        except KeyError:
            system_log.warn('core object state for {} ignored'.format(key))

def update(self):
    if os.path.isfile(self.path + '/job.json'):
        with open(self.path + '/job.json') as f:
            out_job = jsonpickle.loads(f.read())
            self.__dict__.update(out_job.__dict__)
    else:
        self.save()

def load_servers(self):
    resp = urllib2.urlopen("http://qsrdk.daili666api.com/ip/?tid=558571735483018&num=100&delay=3&category=2&protocol=http&foreign=only&format=json")
    servers = jsonpickle.loads(resp.read())
    for sver in servers:
        server = ProxyServer(sver['host'], sver['port'])
        print("load " + str(server) + " into manager")
        self.load_server(server)
def from_dict(cls, node_dict):
    """
    Creates a node from a dict representation
    :param node_dict: dict
    :return: datamodel.base.node.Node
    """
    # import classes, so we can instantiate them at need
    import robograph.datamodel

    # retrieve the class object so we can instantiate the node
    klass = eval(node_dict['class'])
    node = klass(name=node_dict['name'])

    # now retrieve the parameters
    parameters = dict()
    for p in node_dict['params']:
        parameters[p] = node_dict['params'][p]
        # are we deserializing a pickled function?
        if isinstance(node_dict['params'][p], unicode):
            if "py/bytes" in node_dict['params'][p]:
                parameters[p] = cloudpickle.loads(jsonpickle.loads(node_dict['params'][p]))
    node.input(parameters)
    node.set_output_label(node_dict['output_label'])
    return node
def get(self, request):
    # Get the request parameters
    import uuid, datetime
    data = {
        'out_trade_num': uuid.uuid4().hex,
        'order_num': datetime.datetime.today().strftime('%Y%m%d%H%M%S'),
        'payway': request.GET.get('payway'),
        'address': Address.objects.get(id=request.GET.get('address', '')),
        'user': request.session.get('user', '')
    }
    # 1. Insert the Order row
    orderObj = Order.objects.create(**data)
    # 2. Insert the OrderItem rows
    cartItems = jsonpickle.loads(request.GET.get('cartitems'))
    orderItemList = [
        OrderItem.objects.create(order=orderObj, **item)
        for item in cartItems if item
    ]
    totalPriceId = request.GET.get('totalPriceId')[1:]
    # Build the QR-code payment parameters
    params = alipay.direct_pay(subject='京东超市',
                               out_trade_no=orderObj.out_trade_num,
                               total_amount=str(totalPriceId))
    # Assemble the request URL
    url = alipay.gateway + '?' + params
    return HttpResponseRedirect(url)
def test_simple(self):
    print self.segment.byte_bounds_offset(), len(self.segment)
    r2 = self.segment.rawdata[100:400]
    s2 = DefaultSegment(r2)
    print s2.byte_bounds_offset(), len(s2), s2.__getstate__()
    r3 = s2.rawdata[100:200]
    s3 = DefaultSegment(r3)
    print s3.byte_bounds_offset(), len(s3), s3.__getstate__()
    order = list(reversed(range(700, 800)))
    r4 = self.segment.rawdata.get_indexed(order)
    s4 = DefaultSegment(r4)
    print s4.byte_bounds_offset(), len(s4), s4.__getstate__()
    slist = [s2, s3, s4]
    for s in slist:
        print s
    j = jsonpickle.dumps(slist)
    print j
    slist2 = jsonpickle.loads(j)
    print slist2
    for s in slist2:
        s.reconstruct_raw(self.segment.rawdata)
        print s
    for orig, rebuilt in zip(slist, slist2):
        print "orig", orig.data[:]
        print "rebuilt", rebuilt.data[:]
        assert np.array_equal(orig[:], rebuilt[:])
def test_FixedOffsetSerializable(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    serialized = jsonpickle.dumps(fo)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(vars(restored), vars(fo))

def __getitem__(self, key: str) -> Optional[Any]:
    GET_ITEM = f'SELECT value FROM "{self.tablename}" WHERE key = (?)'
    item = self.con.execute(GET_ITEM, (key,)).fetchone()
    if item:
        return jsonpickle.loads(item[0])
    else:
        return None
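# A hypothetical write-side counterpart for the same sqlite3-backed mapping
# (a sketch only: the (key, value) schema, a UNIQUE constraint on key, and the
# self.con connection attribute are assumptions taken from __getitem__ above):
def __setitem__(self, key: str, value: Any) -> None:
    SET_ITEM = (
        f'INSERT INTO "{self.tablename}" (key, value) VALUES (?, ?) '
        'ON CONFLICT(key) DO UPDATE SET value = excluded.value'
    )
    # jsonpickle.dumps turns an arbitrary Python object into a JSON string
    self.con.execute(SET_ITEM, (key, jsonpickle.dumps(value)))
    self.con.commit()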
def get(self, request):
    # Get the request parameters
    cartitems = request.GET.get('cartitems', '')
    # Convert the JSON string into Python objects
    cartitemsList = jsonpickle.loads("[" + cartitems + "]")
    # Convert the Python objects into a list of cart item objects
    cartitemObjList = [
        getCartManger(request).get_cartitems(**item)
        for item in cartitemsList if item
    ]
    # 1. Get the user's default shipping address
    address = request.session.get('user').address_set.get(isdefault=True)
    # 2. Compute the total amount payable
    totalPrice = 0
    for cm in cartitemObjList:
        totalPrice += cm.getTotalPrice()
    return render(request, 'order.html', {
        'cartitemList': cartitemObjList,
        'address': address,
        'totalPrice': totalPrice
    })
def sign_in_user(self, email, password):
    try:
        user = auth.sign_in_with_email_and_password(email, password)
        auth.get_account_info(user['idToken'])
        user_id = auth.get_account_info(user['idToken'])['users'][0]['localId']
        private_key = list(
            db.child('users').child(user_id).get(user['idToken']).val().values())[0]
        self.privateKey = private_key
        self.address = self.address_gen()
        self.publicKey = keys.get_public_key(self.privateKey, curve.P256)
        return True
    except Exception as e:
        print(jsonpickle.loads(e.args[1])['error']['message'])
        return jsonpickle.loads(e.args[1])['error']['message']
def getLoginUserInfo(request):
    """Return the logged-in user object for the template context."""
    user = request.session.get('user', '')
    if user:
        user = jsonpickle.loads(user)
    return {'loginUser': user}
def test_attach_nic_to_net_success(self):
    """
    :return:
    """
    import jsonpickle
    mock_client = Mock()
    test_nova_instance_service.novaclient.Client = Mock(return_value=mock_client)
    mock_instance = Mock()
    mock_iface_attach_result = Mock()
    mock_instance.interface_attach = Mock(return_value=mock_iface_attach_result)

    expected_test_mac = 'test_mac_address'
    expected_port_id = 'test_port_id'
    expected_ip_address = 'test_ip_address'
    mock_result_dict = {'mac_addr': expected_test_mac,
                        'port_id': expected_port_id,
                        'fixed_ips': [{'ip_address': expected_ip_address}]}
    mock_iface_attach_result.to_dict = Mock(return_value=mock_result_dict)
    self.instance_service.get_instance_from_instance_id = Mock(return_value=mock_instance)

    result = self.instance_service.attach_nic_to_net(openstack_session=self.openstack_session,
                                                     net_id='test_net_id',
                                                     instance_id='test_instance_id',
                                                     logger=self.mock_logger)

    expected_result_dict = {'ip_address': expected_ip_address,
                            'port_id': expected_port_id,
                            'mac_address': expected_test_mac}
    self.assertEqual(jsonpickle.loads(result), expected_result_dict)
def callback(ch, method, properties, body):
    try:
        print("{} - RabbitMQ message received - begin processing".format(hostname), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="RabbitMQ message received - begin processing")
        print(jsonpickle.loads(body))
        fileName = jsonpickle.loads(body)['fileName']
        imgHash = jsonpickle.loads(body)['imgHash']
        imgData = pickle.loads(codecs.decode(jsonpickle.loads(body)['imgData'].encode(), "base64"))

        redisNameToHash.set(fileName, imgHash)  # filename -> hash DB1
        print("Entry added to DB1 - redisNameToHash: <{}><{}>".format(fileName, imgHash), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                      body="Entry added to DB1 - redisNameToHash")
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="DB1: <{}><{}>".format(fileName, imgHash))

        redisHashToName.sadd(imgHash, fileName)  # hash -> [filename] DB2
        print("Entry added to DB2 - redisHashToName: <{}><{}>".format(imgHash, redisHashToName.smembers(imgHash)), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                      body="Entry added to DB2 - redisHashToName")
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="DB2: <{}><{}>".format(imgHash, redisHashToName.smembers(imgHash)))

        # Load the received image file
        img = face_recognition.load_image_file(imgData)
        # Get face encodings for any faces in the received image
        unknown_face_encodings = face_recognition.face_encodings(img)
        if len(unknown_face_encodings) > 0:
            for i in unknown_face_encodings:
                redisHashToFaceRec.sadd(imgHash, jsonpickle.dumps(i))  # hash -> [face encodings] DB3
                print("Entry added to DB3 - redisHashToFaceRec: <{}><{}>".format(imgHash, redisHashToFaceRec.smembers(imgHash)), file=sys.stderr)
                rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                              body="Entry added to DB3 - redisHashToFaceRec")
                rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                              body="DB3: <{}><{}>".format(imgHash, redisHashToFaceRec.smembers(imgHash)))
                for key in redisHashToFaceRec.scan_iter():
                    known_face_encodings = [jsonpickle.loads(j) for j in redisHashToFaceRec.smembers(key)]
                    if any(face_recognition.compare_faces(known_face_encodings, i)):
                        redisHashToHashSet.sadd(imgHash, key)  # hash -> [hashes] DB4
                        redisHashToHashSet.sadd(key, imgHash)  # hash -> [hashes] DB4
                        print("Entries added to DB4 - redisHashToHashSet: <{}><{}>, <{}><{}>".format(imgHash, redisHashToHashSet.smembers(imgHash), key, redisHashToHashSet.smembers(key)), file=sys.stderr)
                        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                                      body="Entries added to DB4 - redisHashToHashSet")
                        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                                      body="DB4: <{}><{}>, <{}><{}>".format(imgHash, redisHashToHashSet.smembers(imgHash), key, redisHashToHashSet.smembers(key)))

        print("{} - RabbitMQ message processed and acknowledged".format(hostname), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="RabbitMQ message processed and acknowledged")
        ch.basic_ack(delivery_tag=method.delivery_tag)
    except Exception as e:
        # send negative ack
        print("Error in callback: {}".format(e))
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="Error in callback: {}".format(e))
        channel.basic_nack(delivery_tag=method.delivery_tag)
def post(self, request):
    # Check that the user is logged in
    if 'user' not in request.session:
        return redirect('/user/login/')
    user = jsonpickle.loads(request.session.get('user'))
    # Get sku_ids
    sku_ids = request.POST.getlist('sku_ids')
    # Check that sku_ids is not empty
    if not sku_ids:
        return redirect('/cart/info/')
    # Connect to redis
    con = get_redis_connection('default')
    cart_key = 'cart_%d' % user.id
    skus = []
    total_count = 0
    total_price = 0
    for sku_id in sku_ids:
        # Get the product
        sku = GoodsSKU.objects.get(id=sku_id)
        # Get the quantity
        count = con.hget(cart_key, sku_id)
        # Compute the subtotal
        amount = int(count) * sku.price
        # Dynamically attach the quantity
        sku.count = int(count)
        # Dynamically attach the subtotal
        sku.amount = amount
        # Append
        skus.append(sku)
        # Accumulate the total quantity
        total_count += int(count)
        # Accumulate the total price
        total_price += amount
    # Shipping fee
    transit_price = 10
    # Amount actually payable
    total_pay = total_price + transit_price
    # Get the address list
    address = Address.objects.filter(user=user)
    # Normalize sku_ids into a comma-separated string
    sku_ids = ','.join(sku_ids)
    # Build the template context
    context = {
        'sku_ids': sku_ids,
        'skus': skus,
        'total_count': total_count,
        'total_prece': total_price,
        'transit_price': transit_price,
        'total_pay': total_pay,
        'address': address
    }
    return render(request, 'place_order.html', context)
def post(self, request):
    # Check that the user is logged in
    if 'user' not in request.session:
        return JsonResponse({'res': 0, 'errmsg': '请先登录'})
    user = jsonpickle.loads(request.session.get('user'))
    # Get the parameters
    order_id = request.POST.get('order_id')
    # Validate the data
    if not order_id:
        return JsonResponse({'res': 1, 'errmsg': '无效的订单ID'})
    # Check that the order exists
    try:
        order = OrderInfo.objects.get(user=user,
                                      pay_method=3,
                                      order_status=1,
                                      order_id=order_id)
    except OrderInfo.DoesNotExist:
        return JsonResponse({'res': 2, 'errmsg': '订单无效'})

    # Business logic: call the Alipay payment interface through the python alipay SDK
    # Initialize the SDK
    alipay = AliPay(
        appid="2016093000634740",  # Alipay sandbox APPID
        app_notify_url=None,  # default callback URL
        # application private key
        app_private_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                          'app_private_key.pem'),
        # Alipay public key
        alipay_public_key_path=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                            'alipay_public_key.pem'),
        sign_type='RSA2',  # RSA or RSA2
        debug=True  # defaults to False
    )

    # Poll the Alipay order-query interface
    while True:
        response = alipay.api_alipay_trade_query(order_id)
        code = response.get('code')
        trade_status = response.get('trade_status')
        if code == '10000' and trade_status == 'TRADE_SUCCESS':
            # Payment succeeded
            # Get the Alipay transaction number
            trade_on = response.get('trade_no')
            # Update the order status
            order.trade_on = trade_on
            order.order_status = 4  # awaiting review
            order.save()
            # Return the result
            return JsonResponse({'res': 3, 'message': '支付成功'})
        elif code == '40004' or (code == '10000' and trade_status == 'WAIT_BUYER_PAY'):
            # The call failed but may succeed shortly, or the buyer has not
            # paid yet: wait and query again
            import time
            time.sleep(3)
            continue
        else:
            # Payment failed
            return JsonResponse({'res': 4, 'errmsg': '支付失败'})
def line_received(self, line):
    """ Called when a newline is received

    Parameters
    ----------
    line: String
        The data

    """
    if not line:
        return
    try:
        request = jsonpickle.loads(line)
    except Exception as e:
        return self.send_message({
            'id': None,
            'error': {
                'code': -32700,
                'message': f'Parse error: "{line}"'
            }
        })

    request_id = request.get('id')
    method = request.get('method')
    if method is None:
        return self.send_message({
            "id": request_id,
            "error": {
                'code': -32600,
                'message': "Invalid request"
            }
        })

    handler = getattr(self, 'handle_{}'.format(method), None)
    if handler is None:
        msg = f"Method '{method}' not found"
        return self.send_message({
            "id": request_id,
            'error': {
                'code': -32601,
                'message': msg
            }
        })

    try:
        params = request.get('params', [])
        if isinstance(params, dict):
            result = handler(**params)
        else:
            result = handler(*params)
        return self.send_message({'id': request_id, 'result': result})
    except Exception as e:
        return self.send_message({
            "id": request_id,
            'error': {
                'code': -32500,
                'message': traceback.format_exc()
            }
        })
def post(self, request):
    aname = request.POST.get('user-name')
    phone = request.POST.get('user-phone')
    address = request.POST.get('detailarea')
    user_obj = jsonpickle.loads(request.session.get('user'))
    if Address.objects.filter(account_id=user_obj.id):
        isdel = 0
    else:
        isdel = 1
    account = jsonpickle.loads(request.session.get('user'))
    Address.objects.create(aname=aname, phonenum=phone, addr=address,
                           account=account, isdefault=isdel)
    return redirect('addr')
def test_datetime_with_fixed_offset(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    dt = datetime.datetime.now().replace(tzinfo=fo)
    serialized = jsonpickle.dumps(dt)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored, dt)

def messageFromJSON(JSON):
    partlog = []
    content = jsonpickle.loads(JSON)
    for userevt in content['partLog']:
        partlog.append(UserEventfromJSON(userevt))
    return Message(content['sourceID'], content['destID'], content['tt'], partlog)
def update(self, goodsid, colorid, sizeid, step, *args, **kwargs):
    key = self.__get_key(goodsid, colorid, sizeid)
    if key in self.session[self.cart_name]:
        cartitem = jsonpickle.loads(self.session[self.cart_name][key])
        cartitem.count = int(str(cartitem.count)) + int(step)
    else:
        raise Exception('SessionManager中的update出错了')
def _res(self, r):
    try:
        return jsonpickle.loads(r.json())
    except Exception:
        try:
            return r.json()
        except Exception:
            return r.content.decode()
def __init__(self, cache_file=None):
    self.cache_file = cache_file or str(Path.home()) + '/.sciqlopcache/db.json'
    if os.path.exists(self.cache_file):
        with open(self.cache_file, 'r') as f:
            self._data = jsonpickle.loads(f.read())
    else:
        self._data = {}

def _recv_message(connection):
    # Retrieving the length of the msg to come.
    def _unpack(conn):
        return struct.unpack(_INT_FMT, _recv_bytes(conn, _INT_SIZE))[0]
    msg_metadata_len = _unpack(connection)
    msg = _recv_bytes(connection, msg_metadata_len)
    return jsonpickle.loads(msg)
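# A hypothetical sending-side counterpart illustrating the same length-prefixed
# framing (a sketch only: _INT_FMT, _INT_SIZE and the use of a plain socket
# with sendall() are assumptions based on _recv_message above):
def _send_message(connection, obj):
    payload = jsonpickle.dumps(obj).encode('utf-8')
    # Send the payload length first, so the receiver knows how many bytes to read
    connection.sendall(struct.pack(_INT_FMT, len(payload)))
    connection.sendall(payload)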
def get(self, request):
    # Get the shipping addresses of the currently logged-in user
    # Get the logged-in user object from the session
    userstr = request.session.get('user', '')
    if userstr:
        user = jsonpickle.loads(userstr)
    addr_list = user.address_set.all()
    return render(request, 'address.html', {'addr_list': addr_list})
def test_get_encoded_attachments_file(self):
    class TestNotification(EmailNotification):
        attachments = [File(open('tests/python.jpeg', 'rb'))]

    attachments = jsonpickle.loads(
        TestNotification()._get_encoded_attachments())

    self.assertEqual(attachments[0][0], 'tests/python.jpeg')
    self.assertEqual(attachments[0][2], 'image/jpeg')
def main(request):
    # Get the user from the session
    user = request.session.get('user', '')
    if user:
        u = jsonpickle.loads(user)
        uname = u.uname
    return HttpResponse(u'欢迎%s登陆成功' % uname)
def load_covenant_from_file(path):
    import jsonpickle
    with open(path, "r") as f:
        covenant = jsonpickle.loads(f.read())
    print(f"Covenant successfully loaded from {path}")
    return covenant

def load():
    try:
        with open(DEFAULT_PATH, "r") as file:
            obj = jsonpickle.loads(file.read())
            return obj
    except IOError as e:
        raise IOError("Could not load any password from previous sessions.")
def test_datetime_with_fixed_offset_incremental(self):
    """Test creating an Unpickler and incrementally decoding"""
    if self.should_skip:
        return self.skip('bson is not installed')
    obj = datetime.datetime(2019, 1, 29, 18, 9, 8, 826000, tzinfo=bson.tz_util.utc)
    doc = jsonpickle.dumps(obj)

    # Restore the json using a custom unpickler context.
    unpickler = jsonpickle.unpickler.Unpickler()
    jsonpickle.loads(doc, context=unpickler)

    # Incrementally restore using the same context
    clone = json.loads(doc, object_hook=lambda x: unpickler.restore(x, reset=False))
    self.assertEqual(obj.tzinfo.__reduce__(), clone.tzinfo.__reduce__())
def check_for_saved_bot():
    if os.path.exists(BotFilesManager.bot_resources_file_name):
        file = open(BotFilesManager.bot_resources_file_name)
        data = file.read()
        file.close()
        return jsonpickle.loads(data)
    else:
        return None

def test_datetime_with_fixed_offset(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    dt = datetime.datetime.now().replace(tzinfo=fo)
    serialized = jsonpickle.dumps(dt)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored, dt)

def test_next_runfolder(self):
    path = self._create_ready_runfolder()
    self.assertTrue(self._exists(path))
    response = self.get("./runfolders/next", expect=200)
    response_json = jsonpickle.loads(response.text)
    self.assertEqual(response_json["path"], path)
    self.assertEqual(response_json["state"], State.READY)
    # Remove the path created, so it does not interfere with other tests
    shutil.rmtree(path)

def state_list(self):
    """
    :rtype: list(WatchState)
    """
    state_entry_list = self._state_table().all()
    if not state_entry_list:
        return []
    return jsonpickle.loads(state_entry_list[0]['pickle'])

def _get_sandbox_details(self, sandbox_id):
    url = 'http://{hostname}:{api_port}/api/v1/sandboxes/{sandbox_id}' \
        .format(hostname=self.hostname, api_port=self.api_port, sandbox_id=sandbox_id)
    response = requests.get(url, auth=self.token_auth)
    self._ensure_response_success(response)
    response_text = self.python_version_compatible_response_text(response)
    state = jsonpickle.loads(response_text)
    return state
def from_json(value):
    """
    Convert json to an object.

    This will only work for objects which were converted to json with
    unpicklable=True. In all other cases, the result will be a dict instead.

    :param value: the value to convert to an object
    :return: the object or dict
    """
    return jsonpickle.loads(value)
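# A hypothetical round trip through the helper above (a sketch; Point is an
# invented example class, not part of the original module):
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

encoded = jsonpickle.dumps(Point(1, 2), unpicklable=True)  # keeps py/object type info
restored = from_json(encoded)           # -> a Point instance
plain = from_json('{"x": 1, "y": 2}')   # no type info -> a plain dict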
def deserialize_arguments(args_serialized):
    """
    Provides a method to deserialize arguments provided by the endpoint client.

    :param args_serialized: arguments and keyword arguments serialized by the client
    :type args_serialized: str
    :return: Deserialized representation of the arguments
    :rtype: object
    """
    return jsonpickle.loads(args_serialized)

def test_orchestration_save_should_save_to_complex_path(self):
    request = """
    {
        "custom_params": {
            "folder_path" : "tftp://10.0.0.1/folder1/some folder/and Another Directory"
        }
    }"""
    handler = self._get_handler()
    json_string = handler.orchestration_save(custom_params=request)
    print json_string
    validate(jsonpickle.loads(json_string), schema=get_schema())

def fromJSON(self, text):
    result = jsonpickle.loads(text)
    result["startTS"] = DateTimeDecoder(result["startTS"])
    result["endTS"] = DateTimeDecoder(result["endTS"])
    result["insertTime"] = DateTimeDecoder(result["insertTime"])
    # bt1 = DateTimeDecoder(result["eventRange"][0])
    # bt2 = DateTimeDecoder(result["eventRange"][1])
    # result["eventRange"] = busyT(start=bt1, end=bt2)
    self.__dict__ = result
    return result

def test_orchestration_save_should_save_to_simple_ftp_path(self):
    request = """
    {
        "custom_params": {
            "folder_path" : "ftp://*****:*****@10.0.0.1/"
        }
    }"""
    handler = self._get_handler()
    json_string = handler.orchestration_save(custom_params=request)
    print json_string
    validate(jsonpickle.loads(json_string), schema=get_schema())

def test_roundtrip(self):
    if self.should_skip:
        return self.skip('ecdsa module is not installed')
    message = 'test'.encode('utf-8')
    key_pair = self.SigningKey.generate(curve=self.NIST384p)
    sig = key_pair.sign(message)

    serialized = jsonpickle.dumps(key_pair.get_verifying_key())
    restored = jsonpickle.loads(serialized)
    self.assertTrue(restored.verify(sig, message))

def set_state(self, state):
    value = jsonpickle.loads(state.decode('utf-8'))
    for v in value['open_orders']:
        o = Order()
        o.set_state(v)
        account = self._env.get_account(o.order_book_id)
        self._open_orders.append((account, o))
    for v in value['delayed_orders']:
        o = Order()
        o.set_state(v)
        account = self._env.get_account(o.order_book_id)
        self._delayed_orders.append((account, o))
def test_get_action_result_info_removevlan(self):
    test_action_dict = {
        'customActionAttributes': [
            {'attributeName': 'VM_UUID', 'attributeValue': 'test-vm-uuid'}],
        'connectorAttributes': [
            {'attributeName': 'Interface',
             'attributeValue': '{"ip_address":"test_ip", "mac_address": "test_mac","port_id":"test_port_id"}'}]}
    test_action_obj = DeployDataHolder(test_action_dict)
    print test_action_obj.connectorAttributes[0].attributeValue
    print jsonpickle.loads(test_action_obj.connectorAttributes[0].attributeValue)

    mock_deployed_app_resource_name = 'test app name'
    mock_actionid = 'test-actionid'
    result = self.connectivity_service.get_action_resource_info(
        deployed_app_resource_name=mock_deployed_app_resource_name,
        actionid=mock_actionid,
        action=test_action_obj)

    self.assertEqual(result.vm_uuid, 'test-vm-uuid')
    self.assertEqual(result.iface_ip, 'test_ip')
    self.assertEqual(result.interface_port_id, 'test_port_id')
    self.assertEqual(result.interface_mac, 'test_mac')
def test_orchestration_save_should_save_default_config(self):
    request = """
    {
        "custom_params": {
            "folder_path" : "tftp://10.0.0.1/folder1",
            "vrf_management_name": "network-1"
        }
    }"""
    handler = self._get_handler()
    json_string = handler.orchestration_save(custom_params=request)
    print json_string
    validate(jsonpickle.loads(json_string), schema=get_schema())

def test_orchestration_save_get_invalid_folder_path(self):
    request = """
    {
        "custom_params": {
            "configuration_type" : "Running",
            "folder_path" : "tftp://10.0.0.1///folder1//folder 2//folder 5",
            "vrf_management_name": "network-1"
        }
    }"""
    handler = self._get_handler()
    json_string = handler.orchestration_save(custom_params=request)
    print json_string
    validate(jsonpickle.loads(json_string), schema=get_schema())

def test_serializes_to_schema(self):
    created_date = datetime.datetime.now()
    identifier = created_date.strftime('%y_%m_%d %H_%M_%S_%f')
    orchestration_saved_artifact = OrchestrationSavedArtifact('test_type', identifier)

    saved_artifacts_info = OrchestrationSavedArtifactInfo(
        resource_name="some_resource",
        created_date=created_date,
        restore_rules=OrchestrationRestoreRules(requires_same_resource=True),
        saved_artifact=orchestration_saved_artifact)

    orchestration_save_result = OrchestrationSaveResult(saved_artifacts_info)
    json_string = jsonpickle.encode(orchestration_save_result, unpicklable=False)
    validate(jsonpickle.loads(json_string), schema=self.get_schema())
def UserEventfromJSON(JSON):
    content = jsonpickle.loads(JSON)
    calvt = model.genCalEvt(content['calevt'])
    eventTypeInt = content['eventType']
    eventTypeNew = EventType.unassigned
    if eventTypeInt == 1:
        eventTypeNew = EventType.insert
    elif eventTypeInt == 2:
        eventTypeNew = EventType.delete
    print(calvt)
    event = UserEvent(eventTypeNew, content['Tiii'], calvt, content['uid'])
    return event
def __dispach_method(self, method_name, *args, **kwargs):
    stdin, stdout, stderr = self.ssh_client.exec_command('python3 sshrpc_endpoint.py')
    stdin.write(method_name + '\n')
    client_args = [list(args), kwargs]
    client_args = jsonpickle.dumps(client_args)
    stdin.write(client_args + '\n')

    output = stdout.read()
    # print(output)
    result = jsonpickle.loads(output.decode("utf-8"))
    if result.get('failure'):
        tb = result.get('traceback')
        print('---- Traceback received from remote endpoint ----\n')
        print(tb)
        print('\n----\n')
        raise RemoteMethodException(result.get('exception_class'))
    self.ssh_client.close()
    return result.get('result')