def test_decimal(self):
    # Default behavior: Decimal is preserved
    obj = decimal.Decimal(0.5)
    as_json = jsonpickle.dumps(obj)
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
    self.assertEqual(obj, clone)

    # Custom behavior: we want to use simplejson's Decimal support.
    jsonpickle.set_encoder_options('simplejson', use_decimal=True, sort_keys=True)
    jsonpickle.set_decoder_options('simplejson', use_decimal=True)

    # use_decimal mode allows Decimal objects to pass through to simplejson.
    # The end result is we get a simple '0.5' value as our json string.
    as_json = jsonpickle.dumps(obj, unpicklable=True, use_decimal=True)
    self.assertEqual(as_json, '0.5')
    # But when loading we get back a Decimal.
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
    # Side effect: floats become Decimals too!
    obj = 0.5
    as_json = jsonpickle.dumps(obj)
    clone = jsonpickle.loads(as_json)
    self.assertTrue(isinstance(clone, decimal.Decimal))
def set_state_list(self, state_list):
    """
    :type state_list: list(WatchState)
    """
    state_table = self._state_table()
    state_table.purge()
    state_table.insert({'pickle': jsonpickle.dumps(state_list)})
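# A hedged sketch of the matching read path for set_state_list() above; the
# tinydb-style table API (all(), purge(), insert()) is assumed from the calls
# in that method, and get_state_list is a hypothetical name:
def get_state_list(self):
    rows = self._state_table().all()
    if not rows:
        return []
    return jsonpickle.loads(rows[0]['pickle'])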
def attach_nic_to_net(self, openstack_session, instance_id, net_id, logger):
    """
    :param openstack_session:
    :param instance_id:
    :param net_id:
    :param logging.Logger logger:
    :return:
    """
    instance = self.get_instance_from_instance_id(openstack_session=openstack_session,
                                                  instance_id=instance_id,
                                                  logger=logger)
    if instance is None:
        return None
    try:
        res = instance.interface_attach(net_id=net_id, port_id=None, fixed_ip=None)
        # Convert once instead of calling res.to_dict() for each field
        iface = res.to_dict()
        iface_mac = iface.get('mac_addr')
        iface_portid = iface.get('port_id')
        iface_ip = iface.get('fixed_ips')[0]['ip_address']
        result = jsonpickle.dumps({'ip_address': iface_ip,
                                   'port_id': iface_portid,
                                   'mac_address': iface_mac})
        return result
    except Exception as e:
        logger.error("Exception: {0} during interface attach.".format(e))
        raise
def test_nested_objects(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    o = Object(99)
    serialized = jsonpickle.dumps(o)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored.offset, datetime.timedelta(99))
def editFile(request):
    logging.info('Handling editFile request: %s', request.REQUEST.items())
    instanceId = request.REQUEST.get('instanceId')
    sha1 = request.REQUEST.get('sha1')
    file = models.File.gql('where instanceId = :1 and sha1 = :2',
                           int(instanceId), sha1).get()
    if file is not None:
        requestTags = request.REQUEST.get('tags')
        newTags = toTags(requestTags)
        file.tags = newTags
        file.put()
        filesInstance = {'tags': newTags,
                         'success': True,
                         'message': 'Michael Corleone says hello'}
        json = jsonpickle.dumps(filesInstance)
        return jsonControllerUtils.writeResponse(request, json)
    else:
        logging.warn('No matching file')
        return HttpResponseNotFound()
def pickle(self, files, total):
    newFiles = []
    for file in files:
        normalizedEntities = toNormalizedDict(file)
        newFiles.append(normalizedEntities)
    # TODO: totalResultsFormatted needs to be:
    # 1) Really the total results
    # 2) Formatted.
    filesInstance = {'totalResults': total,
                     'totalResultsFormatted': total,
                     'results': newFiles}
    json = jsonpickle.dumps(filesInstance)
    return json
def test_timedelta(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    td = datetime.timedelta(-1, 68400)
    serialized = jsonpickle.dumps(td)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored, td)
def test_simple(self):
    print(self.segment.byte_bounds_offset(), len(self.segment))
    r2 = self.segment.rawdata[100:400]
    s2 = DefaultSegment(r2)
    print(s2.byte_bounds_offset(), len(s2), s2.__getstate__())
    r3 = s2.rawdata[100:200]
    s3 = DefaultSegment(r3)
    print(s3.byte_bounds_offset(), len(s3), s3.__getstate__())
    order = list(reversed(range(700, 800)))
    r4 = self.segment.rawdata.get_indexed(order)
    s4 = DefaultSegment(r4)
    print(s4.byte_bounds_offset(), len(s4), s4.__getstate__())
    slist = [s2, s3, s4]
    for s in slist:
        print(s)
    j = jsonpickle.dumps(slist)
    print(j)
    slist2 = jsonpickle.loads(j)
    print(slist2)
    for s in slist2:
        s.reconstruct_raw(self.segment.rawdata)
        print(s)
    for orig, rebuilt in zip(slist, slist2):
        print("orig", orig.data[:])
        print("rebuilt", rebuilt.data[:])
        assert np.array_equal(orig[:], rebuilt[:])
def _save_state(self, change):
    """ Try to save the plugin state """
    if change['type'] in ['update', 'container', 'request']:
        try:
            log.info("Saving state due to change: {}".format(change))

            #: Dump first so any failure to encode doesn't wipe out the
            #: previous state
            state = self.__getstate__()
            excluded = ['manifest', 'workbench'] + [
                m.name for m in self.members().values()
                if not m.metadata or not m.metadata.get('config', False)
            ]
            for k in excluded + self._state_excluded:
                if k in state:
                    del state[k]
            state = pickle.dumps(state)

            #: Pretty format it
            state = json.dumps(json.loads(state), indent=2)

            dst = os.path.dirname(self._state_file)
            if not os.path.exists(dst):
                os.makedirs(dst)

            with open(self._state_file, 'w') as f:
                f.write(state)
        except Exception as e:
            log.warning("Failed to save state: {}".format(
                traceback.format_exc()))
def test_FixedOffsetSerializable(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    serialized = jsonpickle.dumps(fo)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(vars(restored), vars(fo))
def save_to_uri(self, bytes, uri, save_metadata=True):
    # Have to use a two-step process to write to the file: open the
    # filesystem, then open the file. Have to open the filesystem
    # as writeable in case this is a virtual filesystem (like ZipFS),
    # otherwise the write to the actual file will fail with a read-
    # only filesystem error.
    if uri.startswith("file://"):
        # FIXME: workaround to allow opening of file:// URLs with the
        # ! character
        uri = uri.replace("file://", "")
    fs, relpath = opener.parse(uri, writeable=True)
    fh = fs.open(relpath, 'wb')
    log.debug("saving to %s" % uri)
    fh.write(bytes)
    fh.close()
    if save_metadata:
        metadata_dict = dict()
        self.get_extra_metadata(metadata_dict)
        if metadata_dict:
            relpath += ".omnivore"
            log.debug("saving extra metadata to %s" % relpath)
            jsonpickle.set_encoder_options("json", sort_keys=True, indent=4)
            bytes = jsonpickle.dumps(metadata_dict)
            text = jsonutil.collapse_json(bytes)
            header = self.get_extra_metadata_header()
            fh = fs.open(relpath, 'wb')
            fh.write(header)
            fh.write(text)
            fh.close()
            self.metadata_dirty = False
    fs.close()
def power_on(self, command_context):
    """
    Powers on the instance.
    :param cloudshell.shell.core.context.ResourceRemoteCommandContext command_context:
    :rtype None:
    """
    with LoggingSessionContext(command_context) as logger:
        with ErrorHandlingContext(logger):
            with CloudShellSessionContext(command_context) as cs_session:
                resource_model = self.model_parser.get_resource_model_from_context(command_context.resource)
                context_remote = command_context.remote_endpoints[0]
                if context_remote is None:
                    raise ValueError("Cannot get remote_endpoint for command context: {0}".format(command_context))
                deployed_app_resource = self.model_parser.deployed_app_resource_from_context_remote(context_remote)
                deployed_app_fullname = context_remote.fullname
                logger.debug(jsonpickle.dumps(deployed_app_resource, unpicklable=False))
                os_session = self.os_session_provider.get_openstack_session(cs_session, resource_model, logger)
                self.power_operation.power_on(openstack_session=os_session,
                                              cloudshell_session=cs_session,
                                              deployed_app_resource=deployed_app_resource,
                                              resource_fullname=deployed_app_fullname,
                                              logger=logger)
def test_sqlalchemy_with_dynamic_table(self):
    """Test creating a table dynamically, per #180"""
    if self.should_skip:
        return self.skip('sqlalchemy is not installed')
    meta = sqa.MetaData()
    expect = sqa.Table(
        'test', meta,
        sqa.Column('id', sqa.Integer()),
        sqa.Column('text', sqa.Text()))
    jsonstr = jsonpickle.dumps(expect)
    actual = jsonpickle.loads(jsonstr)
    self.assertEqual(expect.__class__, actual.__class__)
    self.assertEqual(expect.name, actual.name)
    # These must be unique instances
    self.assertNotEqual(expect.metadata, actual.metadata)
    # Column names must match exactly
    self.assertEqual(sorted(expect.columns.keys()),
                     sorted(actual.columns.keys()))
    # As should the types
    self.assertEqual(expect.c.id.name, actual.c.id.name)
    self.assertEqual(expect.c.id.type.__class__, actual.c.id.type.__class__)
    self.assertEqual(expect.c.text.name, actual.c.text.name)
    self.assertEqual(expect.c.text.type.__class__, actual.c.text.type.__class__)
def test_datetime_with_fixed_offset(self):
    if self.should_skip:
        return self.skip('bson is not installed')
    fo = bson.tz_util.FixedOffset(-60 * 5, 'EST')
    dt = datetime.datetime.now().replace(tzinfo=fo)
    serialized = jsonpickle.dumps(dt)
    restored = jsonpickle.loads(serialized)
    self.assertEqual(restored, dt)
def get_state(self):
    result = {}
    for key, obj in six.iteritems(self._objects):
        state = obj.get_state()
        if state is not None:
            result[key] = state
    return jsonpickle.dumps(result).encode('utf-8')
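# A hedged counterpart for restoring the state saved by get_state() above;
# it assumes each tracked object exposes a set_state() mirroring get_state(),
# which is not shown in the original:
def set_state(self, blob):
    result = jsonpickle.loads(blob.decode('utf-8'))
    for key, state in six.iteritems(result):
        obj = self._objects.get(key)
        if obj is not None:
            obj.set_state(state)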
def delegate_submission(request):
    # get submission object
    sub_id = request.POST.get('sub_id')
    if not sub_id:
        sub_id = request.GET.get('sub_id')
    # tonietuk's intercept starts
    if not sub_id:
        return HttpResponse(jsonpickle.dumps({'status': 0}))
    # tonietuk's intercept ends
    sub = Submission().get_record(sub_id)
    repo = sub['repository']

    ## Submit to Figshare
    if repo == 'figshare':
        # check figshare credentials
        if figshareSubmission.FigshareSubmit(sub_id).isValidCredentials(user_id=request.user.id):
            figshareSubmission.FigshareSubmit(sub_id).submit(
                sub_id=sub_id,
                dataFile_ids=sub['bundle'],
            )
            return HttpResponse(jsonpickle.dumps({'status': 1}))
        else:
            # forward to control view
            return HttpResponse(jsonpickle.dumps({'status': 1, 'url': reverse('copo:authenticate_figshare')}))

    ## Submit to ENA
    elif repo == 'ena':
        enaSubmission.EnaSubmit().submit(
            sub_id=sub_id,
            dataFile_ids=sub['bundle'],
        )
        return HttpResponse(jsonpickle.dumps({'status': 1}))

    # return default
    return HttpResponse(jsonpickle.dumps({'status': 0}))
def _execution_result(v):
    data = {'error_before_exec': v.error_before_exec,
            'error_in_exec': v.error_in_exec,
            'execution_count': v.execution_count,
            'result': v.result,
            'success': v.success}
    return jsonpickle.dumps(data)
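# A minimal usage sketch for _execution_result(); FakeResult is a
# hypothetical stand-in for IPython's ExecutionResult object:
class FakeResult:
    error_before_exec = None
    error_in_exec = None
    execution_count = 1
    result = 42
    success = True

payload = _execution_result(FakeResult())
# payload is a JSON string carrying the five fields above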
def downloadSources(request):
    logging.info('Handling download sources request: %s', request.REQUEST.items())
    # We need to query all file instances for the given URI and that are
    # listed as online.
    # TODO: We could eventually add optimizations based on IP, time zone, etc
    # here.
    uri = request.REQUEST.get('uri')
    metaFile = models.MetaFile.gql('where uri = :1', uri).get()
    if metaFile is None:
        logging.info('No matching URI found')
        return HttpResponseNotFound('No matching URI found')
    downloads = metaFile.numDownloads + 1
    metaFile.numDownloads = downloads
    metaFile.put()
    if metaFile.numOnlineInstances == 0:
        logging.warn("No online instances")
        return HttpResponseNotFound('No online instances')
    filesQuery = models.File.all()
    filesQuery.filter('uri = ', uri)
    filesQuery.filter('instanceOnline = ', True)
    # The number of files shouldn't be too huge, but we'd ideally use a
    # distributed counter here.
    files = filesQuery.fetch(200)
    sha1 = metaFile.sha1
    urls = []
    for file in files:
        url = 'sip://' + str(file.instanceId) + '/uri-res/N2R-' + sha1
        urls.append(url)
    if len(urls) == 0:
        logging.warn("Setting numOnlineInstances to 0")
        metaFile = models.MetaFile.gql('where uri = :1', uri).get()
        metaFile.numOnlineInstances = 0
        metaFile.put()
    logging.info('Added all instance data...pickling JSON...')
    sources = {'title': metaFile.title,
               'downloads': downloads,
               'urls': urls,
               'sha1': sha1}
    json = jsonpickle.dumps(sources)
    logging.info('Returning JSON: %s', json)
    return HttpResponse(json)
def json_out_handler(*args, **kwargs):
    """
    Custom cherrypy json_handler which uses jsonpickle for converting
    objects to json. This is a copy of cherrypy.jsontools json_handler()
    but encodes with jsonpickle.

    Remark: cherrypy requires a bytes output, so make sure to convert it
    to bytes.
    """
    value = cherrypy.serving.request._json_inner_handler(*args, **kwargs)
    return s2b(jsonpickle.dumps(value, unpicklable=False))
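# s2b() above is an assumed str-to-bytes helper not shown in the original;
# a minimal stand-in could look like this:
def s2b(value):
    # cherrypy requires bytes output from a json handler
    return value.encode('utf-8') if isinstance(value, str) else value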
def to_json(value, unpickable=False):
    """
    Convert an object to json.

    :param value: the value to convert to json
    :param unpickable: if set to true, the json will contain extra info to
        convert the json back to the object
    :return: the json string
    """
    return jsonpickle.dumps(value, unpicklable=unpickable)
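# A minimal usage sketch for to_json(); the Point class is hypothetical,
# introduced only for illustration:
class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y

flat = to_json(Point(1, 2))                   # '{"x": 1, "y": 2}'
rich = to_json(Point(1, 2), unpickable=True)  # adds a "py/object" marker
# jsonpickle.loads(rich) rebuilds an actual Point instance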
def saveModelStep(self):
    # Where should we save the model? disk/database?
    # Right now, let's save to disk.
    # The assumption is that people will run the script first to generate
    # the model. Need to figure out how others will get seed data.
    model_rep = jpickle.dumps(self.model)
    with open(SAVED_MODEL_FILENAME, "w") as fd:
        fd.write(model_rep)
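# A hedged sketch of the matching load step (SAVED_MODEL_FILENAME and the
# jpickle alias as used above):
def loadModelStep(self):
    with open(SAVED_MODEL_FILENAME) as fd:
        self.model = jpickle.loads(fd.read())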
def create():
    form = OrderForm()
    if form.validate_on_submit():
        save_order(form.data)
        return redirect(url_for("orders.list"))
    clients = get_all_clients()
    form.client.choices = [(item.id, "{} {}".format(item.name, item.surname))
                           for item in clients]
    client_choices = jsonpickle.dumps(clients)
    products = get_all_products()
    product_choices = {}
    for product in products:
        product_choices[product.id] = {"id": product.id,
                                       "price": product.string_price}
    product_choices = jsonpickle.dumps(product_choices)
    return render_template(
        "orders/create.html",
        form=form,
        client_choices=client_choices,
        product_choices=product_choices
    )
def get(self):
    try:
        data = dao.get_all_categories()
        if data is not None:
            return jsonify({"categoryList": json.loads(jsonpickle.dumps(data, unpicklable=False))})
        else:
            # bare raise with no active exception raises a RuntimeError,
            # which the handler below catches
            raise
    except Exception as e:
        print("AllCategoryApi", e)
def sctoolbox_get(request):
    '''
    :param request:
    :return: Return a list of all the sctools which have a get_parser
    '''
    # logging.info("access to {} route".format(inspect.stack()[1][3]))
    session = request.db
    rt = session.query(models.RegisteredTool).all()
    return jsonpickle.dumps(rt)
def test_roundtrip(self):
    if self.should_skip:
        return self.skip('ecdsa module is not installed')
    message = 'test'.encode('utf-8')
    key_pair = self.SigningKey.generate(curve=self.NIST384p)
    sig = key_pair.sign(message)

    serialized = jsonpickle.dumps(key_pair.get_verifying_key())
    restored = jsonpickle.loads(serialized)

    self.assertTrue(restored.verify(sig, message))
def jsonify(func, *args, **kwargs):
    """Creates a :class:`~flask.Response` with the JSON representation of
    the given arguments with an `application/json` mimetype. The arguments
    to this function are the same as to the :class:`dict` constructor.

    It is a decorator around :func:`flask.jsonify` that automatically
    serializes an object or dictionary to a JSON response via
    :module:`jsonpickle` & :func:`flask.jsonify`.

    Example usage::

        from flask.views import MethodView
        from .decorator import jsonify

        class User(object):
            def __init__(self, name):
                self.name = name

        class UserView(MethodView):
            decorators = [jsonify]

            def get(self):
                return {"name": "Tony"}

            def post(self):
                user = User("Tony")
                return user

    This will send a JSON response like this to the browser::

        {
            "name": "Tony",
        }

    For security reasons only objects are supported toplevel. For more
    information about this, have a look at :ref:`json-security`.

    This function's response will be pretty printed if it was not requested
    with ``X-Requested-With: XMLHttpRequest`` to simplify debugging unless
    the ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to false.
    """
    result = func(*args, **kwargs)
    if isinstance(result, dict):
        pass
    elif isinstance(result, Response):
        return result
    elif type(result) in JSON_TYPES:
        result = {"data": result}
    else:
        result = dumps(result, unpicklable=False)
    return flask_jsonify(result)
def save_area(self):
    """ Save the dock area for the workspace. """
    print("Saving dock area")
    area = self.content.find('dock_area')
    try:
        with open('inkcut.workspace.db', 'w') as f:
            f.write(pickle.dumps(area))
    except Exception as e:
        print("Error saving dock area: {}".format(e))
        return e
def publish(self, receiver, type, msg):
    if receiver is None or type is None or msg is None:
        raise AttributeError
    if not self.connected:
        raise Exception("No connection!")
    print('Connected!')
    m = self.Message(receiver, type, msg)
    needtosend = jsonpickle.dumps(m)
    # msga = {'receiver': '/turtle1/command_velocity', 'type': 'turtlesim/Velocity', 'msg': {'linear': 20.0, 'angular': 20.0}}
    # msgb = {"receiver": "/turtle1/command_velocity", "type": "turtlesim/Velocity", "msg": {"linear": 20.0, "angular": 20.0}}
    print(needtosend)
    self.send(needtosend)
    print('Message sent!')
def dependency_graph(self):
    """
    A dict describing the dependencies of the subprocesses that are part of
    a process. This is meant to be overridden by the specific execution
    class.

    The dict has the following shape:
    {child1: [parent1, parent2, ...], child2: None, child3: [parent1, parent4], ...}
    None means that the subprocess has no dependency.

    If there is no dependency at all, {self.name: 'None'} should do the job.
    """
    return jsonpickle.dumps({self.name: 'None'})
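# A hedged sketch of how a concrete execution class might override
# dependency_graph(); the class and step names here are hypothetical:
import jsonpickle

class PipelineExecution:
    name = 'pipeline'

    def dependency_graph(self):
        # 'extract' has no dependency; 'transform' waits on 'extract',
        # and 'load' waits on 'transform'.
        return jsonpickle.dumps({
            'extract': None,
            'transform': ['extract'],
            'load': ['transform'],
        })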
def _get_encoded_attachments(self):
    attachments = self.get_attachments()
    new_attachments = []
    for attachment in attachments or []:
        if isinstance(attachment, File):
            attachment.seek(0)
            new_attachments.append((attachment.name, attachment.read(),
                                    guess_type(attachment.name)[0]))
        else:
            new_attachments.append(attachment)
    return jsonpickle.dumps(new_attachments)
def buyItem(email, item):
    item = item.replace("\"", "")
    player = get_current_player(email)
    uni = get_current_universe(email)
    email_tag = str(email).replace('.', '_')
    date = db.child('users').child(email_tag).child('current_player').get().val()
    planetInventory = universe.getPlanetMarket(player.location.name, uni).marketInventory
    playerInventory = player.inventory
    if planetInventory.inventory[item].stock > 0:
        if player.credits >= planetInventory.inventory[item].price:
            playerInventory.inventory[item].stock += 1
            player.credits -= planetInventory.inventory[item].price
            planetInventory.inventory[item].stock -= 1
            pickle_player = jsonpickle.dumps(player)
            pickle_solar1 = jsonpickle.dumps(uni.solarSystems[0])
            pickle_solar2 = jsonpickle.dumps(uni.solarSystems[1])
            pickle_solar3 = jsonpickle.dumps(uni.solarSystems[2])
            db.child('users').child(email_tag).child("players").child(
                date).child("player_info").set(pickle_player)
            db.child('users').child(email_tag).child("players").child(
                date).child("universe").child('solar1').set(pickle_solar1)
            db.child('users').child(email_tag).child("players").child(
                date).child("universe").child('solar2').set(pickle_solar2)
            db.child('users').child(email_tag).child("players").child(
                date).child("universe").child('solar3').set(pickle_solar3)
            return player.credits
        else:
            return "You do not have enough credits."
    else:
        return "Item is out of stock."
def save(self, file: str) -> NoReturn:
    with open(file, 'w') as f:
        saved = collections.defaultdict(dict)
        saved.update({
            'n_bins': self.n_bins,
            'laplace_m': self.laplace_m,
            'binners': {
                jsonpickle.dumps(feature): jsonpickle.dumps(binner)
                for feature, binner in self.binners.items()
            }
        })
        for feature, f_params in self.params.items():
            json_feature = jsonpickle.dumps(feature)
            if feature.type == mldata.Feature.Type.CLASS:
                saved['params'][json_feature] = dict(f_params)
            else:
                saved['params'][json_feature] = {
                    f'{val[0]}_{val[1]}': param
                    for val, param in f_params.items()
                }
                saved['params'][json_feature].update(
                    {'default': f_params.default_factory()})
        json.dump(saved, f, indent='\t')
def test_schedule_queue():
    publish_event = MagicMock()
    publish_event.return_value = 'aaa'
    task_queue = TaskQueue(publish_event)

    @task_queue.task(schedules=['bbb'])
    def funcy():
        return "blah"

    task_queue.process_schedule('bbb')
    event = jsonpickle.dumps((funcy.path, (), {}))
    publish_event.assert_called_once_with(event)
def handle_data(self, _, c, d, t, a, b, g):
    if c == 3 or c == 4:
        return
    self._mutex.acquire()
    try:
        self._count = (self._count + 1) % 19
    finally:
        self._mutex.release()
    if self._count == 0:
        requests.post("http://localhost:5000",
                      data=jsonpickle.dumps(OscObject(c, d, t, a, b, g)))
def getReaderInfo(request):
    # get the reader barcode
    readerid = request.GET.get('readerid', 0)
    readerid = int(readerid)
    # look up the reader record
    reader = Readerinfo.objects.get(barcode=readerid)
    typename = reader.rtid.typename
    num = reader.rtid.num
    sreader = jsonpickle.dumps(reader, unpicklable=False)
    return JsonResponse({'reader': sreader, 'typename': typename, 'num': num})
def add(self, goodsid, colorid, sizeid, count, *args, **kwargs):
    # build the unique key for this cart item
    key = self.__get_key(goodsid, colorid, sizeid)
    # session layout: {'cart': {key1: cartitem, key2: cartitem}}
    if key in self.session[self.cart_name]:
        self.update(goodsid, colorid, sizeid, count, *args, **kwargs)
    else:
        self.session[self.cart_name][key] = jsonpickle.dumps(
            CartItem(goodsid=goodsid, colorid=colorid, sizeid=sizeid, count=count))
def post(self, request):
    uname = request.POST.get('account', '')
    pwd = request.POST.get('password', '')
    log(pwd)
    if Account.objects.filter(uname=uname):
        return redirect('/user/register/')
    else:
        pwd = hashlib.sha256(pwd.encode('utf-8'))
        pwd1 = pwd.hexdigest()
        log(pwd1)
        user = Account.objects.create(uname=uname, pwd=pwd1)
        request.session['user'] = jsonpickle.dumps(user)
        # return redirect('/home/')
        return redirect('center')
def marshal(output):
    """
    Returns the json serialized output of function calls which is sent back
    to clients.
    """
    # TODO: How should we return the return value?
    output = marshaltype(output)
    try:
        # If it is json serializable, do it:
        return_json = json.dumps(output)
    except TypeError:
        # Else fall back to jsonpickle and return its string representation.
        return_json = jsonpickle.dumps(output, unpicklable=False)
    return return_json
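# A minimal usage sketch for marshal(); marshaltype() is assumed to
# normalize wrapper types (a pass-through stand-in is shown here), and
# CustomResult is hypothetical:
def marshaltype(output):
    # pass-through stand-in for the real normalizer
    return output

class CustomResult:
    def __init__(self, value):
        self.value = value

marshal({'ok': True})      # json-serializable, handled by json.dumps
marshal(CustomResult(42))  # falls back to jsonpickle, e.g. '{"value": 42}'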
def post(self, request):
    uname = request.POST.get("reg_userid")
    pwd = request.POST.get("reg_passwd")
    reflag = request.POST.get('reflag')
    user = User.objects.filter(username=uname, password=pwd)
    if user:
        request.session['user'] = jsonpickle.dumps(user[0])
        SessionCartManager(request.session).migrateSession2DB()
        if reflag == 'cart':
            return HttpResponseRedirect('/cart/queryAll/')
        return HttpResponseRedirect('/user/center/')
    # message: "incorrect username or password"
    return render(request, 'net_app/login.html', {'msg': '用户名或密码错误'})
def logintwo(request):
    if request.method == 'GET':
        return render(request, 'logintwo.html')
    else:
        # read the request parameters
        uname = request.POST.get('uname', '')
        pwd = request.POST.get('pwd', '')
        import jsonpickle
        # validate the credentials
        if uname == 'zhangsan' and pwd == '123':
            user = User(uname, pwd)
            request.session['login'] = jsonpickle.dumps(user)
            return HttpResponseRedirect('/one/usercentertwo')
        return HttpResponseRedirect('/one/logintwo')
def dump(target_path, agents=None, paths=None, grid=None, metadata: dict = None):
    if not agents and not paths:
        raise ValueError('At least one of agents, paths should be non-empty/none')
    with zipfile.ZipFile(target_path, 'w', zipfile.ZIP_DEFLATED) as z:
        if paths is None:
            try:
                paths_for_csv = [[((s.x, s.y), s.step) for s in a.path] for a in agents]
            except Exception:
                paths_for_csv = []
        else:
            paths_for_csv = paths
        max_length = max(len(p) for p in paths_for_csv)
        for p in paths_for_csv:
            if len(p) < max_length:
                p.extend([None] * (max_length - len(p)))
        df = pandas.DataFrame(paths_for_csv)
        paths_csv_text = df.to_csv(None, index=False, header=False)
        z.writestr(PATHS_FILE_NAME, paths_csv_text)
        if agents is None:
            agent_dicts = None
        else:
            agent_dicts = [a.to_dict() for a in agents]
            for ad in agent_dicts:
                for p in ad.get('path', []):
                    for attr in ['_agent', '_agent_repo', 'grid']:
                        try:
                            setattr(p, attr, None)
                        except (KeyError, AttributeError):
                            pass
        agents_dump = jsonpickle.dumps(agent_dicts)
        z.writestr(AGENTS_FILE_NAME, agents_dump)
        if grid:
            z.writestr(GRID_FILE_NAME, grid.to_str())
        if metadata:
            z.writestr(METADATA_FILE_NAME, json.dumps(metadata, indent=2))
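# A hedged sketch of reading the agents back out of a dump produced by
# dump() above (AGENTS_FILE_NAME as used there):
import zipfile
import jsonpickle

def load_agents(dump_path):
    with zipfile.ZipFile(dump_path) as z:
        return jsonpickle.loads(z.read(AGENTS_FILE_NAME).decode('utf-8'))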
def delete(self, cache_key: str, objects: Union[Dict, Set[Any]]):
    """
    If any of these items exist in the cache for this set of stored values,
    delete them.

    :param cache_key: Key to potentially delete items from
    :param objects: *Keys* in the cached DICT to delete, or items in a
        cached LIST to delete.
    """
    if isinstance(objects, Set):
        objects = list(objects)
    if len(objects) > 0:
        log.info(f'Deleting {len(objects)} items from local cache: {objects}')
        contents: Dict = jsonpickle.decode(self.__read())
        cache = contents.get(cache_key, {})
        refresh_time = cache.get(self._LAST_REFRESH_KEY, 0)
        cache_obj = cache.get(self._STORE_KEY)
        log.info(f'In cache: {cache_obj}')
        if isinstance(cache_obj, Dict) and isinstance(objects, Dict) or cache_obj is None:
            log.info('Cache obj is a dict')
            if cache_obj:
                for obj in objects:
                    del cache_obj[obj]
        elif isinstance(cache_obj, List) and isinstance(objects, List) or cache_obj is None:
            log.info('Cache obj is a list')
            if cache_obj:
                cache_obj = list(set(cache_obj) - set(objects))
        else:
            raise RuntimeError(
                "Invalid state detected. Cache contains an invalid type that cannot be appended to, "
                "or the type provided does not match the type stored in the cache.")
        log.info(f'New cache obj: {cache_obj}')
        contents[cache_key] = {
            self._STORE_KEY: cache_obj,
            self._LAST_WRITE_KEY: Utils.millis_since_epoch(),
            self._LAST_REFRESH_KEY: refresh_time,
        }
        self.__write(jsonpickle.dumps(contents))
    else:
        log.info('No cached items found to delete from the cache.')
def post(self, request, *args, **kwargs):
    # read the parameters
    uname = request.POST.get('userNum', '')
    pwd = request.POST.get('userPw', '')
    # non-empty check
    if uname and pwd:
        # check against the database
        user = UserInfo.objects.filter(user_num=uname, user_pw=pwd)
        if user:
            user_obj = User(uname=uname, pwd=pwd)
            user_str = jsonpickle.dumps(user_obj)
            request.session['user_str'] = user_str
            request.session.set_expiry(3 * 365 * 24 * 60 * 60)
            return HttpResponseRedirect('/customer/')
    return HttpResponseRedirect('/employee/login/')
def create():
    form = OrderForm()
    if form.validate_on_submit():
        save_order(form.data)
        return redirect(url_for('orders.list'))
    clients = get_all_clients()
    form.client.choices = [(item.id, '{} {}'.format(item.name, item.surname))
                           for item in clients]
    client_choices = jsonpickle.dumps(clients)
    products = get_all_products()
    product_choices = {}
    for product in products:
        product_choices[product.id] = {
            'id': product.id,
            'price': product.string_price,
        }
    product_choices = jsonpickle.dumps(product_choices)
    return render_template('orders/create.html',
                           form=form,
                           client_choices=client_choices,
                           product_choices=product_choices)
def login1_view(request):
    if request.method == 'GET':
        return render(request, 'Cookie_login1.html')
    else:
        # read the request parameters
        uname = request.POST.get('uname', '')
        pwd = request.POST.get('pwd', '')
        # check whether the login succeeded
        if uname == 'zhangsan' and pwd == '123':
            user = User(uname, pwd)
            request.session['user'] = jsonpickle.dumps(user)
            return redirect('/test_Cookie/main/')
        else:
            return redirect('/test_Cookie/login1/')
def test_datetime_with_fixed_offset_incremental(self):
    """Test creating an Unpickler and incrementally encoding"""
    if self.should_skip:
        return self.skip('bson is not installed')
    obj = datetime.datetime(2019, 1, 29, 18, 9, 8, 826000,
                            tzinfo=bson.tz_util.utc)
    doc = jsonpickle.dumps(obj)

    # Restore the json using a custom unpickler context.
    unpickler = jsonpickle.unpickler.Unpickler()
    jsonpickle.loads(doc, context=unpickler)

    # Incrementally restore using the same context
    clone = json.loads(doc, object_hook=lambda x: unpickler.restore(x, reset=False))
    self.assertEqual(obj.tzinfo.__reduce__(), clone.tzinfo.__reduce__())
def do_mcts():
    fp = SequentialFunctionPredictor(k_in_knn=K_IN_KNN,
                                     laplacian_beta_knn=LAPLACIAN_BETA_KNN,
                                     laplacian_beta_markov=LAPLACIAN_BETA_MARKOV,
                                     num_attributes_to_include=10)
    fp.predict_mcts(epsilon_greedy=0.1)
    accuracy = fp.eval_dataset_accuracy(fp.narratives, 'label', 'prediction_mcts')
    print('accuracy gt vs mcts', fp.eval_dataset_accuracy(fp.narratives, 'label', 'prediction_mcts'))
    ranks_mcts = fp.eval_dataset_rank(fp.narratives, 'distribution_mcts')
    print(util.describe_distribution(ranks_mcts))
    print('accuracy gt vs knn', fp.eval_dataset_accuracy(fp.narratives, 'label', 'prediction_knn'))
    ranks_knn = fp.eval_dataset_rank(fp.narratives, 'prediction_knn')
    print(util.describe_distribution(ranks_knn))
    print('accuracy knn vs mcts', fp.eval_dataset_accuracy(fp.narratives, 'prediction_knn', 'prediction_mcts'))
    stories = util.flatten([[i.story for j in i.data] for i in fp.narratives])
    for i in zip(stories, ranks_knn, ranks_mcts):
        print(i)
    with open("mcts_knn_markov_ranks.json", 'w') as f:
        f.write(jsonpickle.dumps((ranks_knn, ranks_mcts)))
    with open("mcts_knn_markov_narratives.json", 'w') as f:
        f.write(jsonpickle.dumps(fp.narratives))
    if False:
        ranks = fp.eval_dataset_rank(fp.narratives, 'distribution_mcts')
        print(accuracy, util.describe_distribution(ranks))
    else:
        print(accuracy)
def _save_file(self):
    # alternative _save_file(): save_jsonpickle(self.file_name, self.secrets)
    # only save the vault if it was updated
    if self.is_updated:
        # add the key vault password to the saved key vault file
        self.secrets[self.password_key] = self.password
        # pickle the vault dict as a json string and save it to a json file
        # inside a zip file
        json_data = jsonpickle.dumps(self.secrets)
        with zipfile.ZipFile(self._file_name(), mode='w',
                             compression=zipfile.ZIP_DEFLATED) as zip_file:
            zip_file.writestr(self.vault_json_file, json_data.encode())
        # clear is_updated status
        self.is_updated = False
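# A hedged sketch of the matching load path (vault_json_file and
# password_key as used in _save_file() above):
def _load_file(self):
    with zipfile.ZipFile(self._file_name()) as zip_file:
        json_data = zip_file.read(self.vault_json_file).decode()
    self.secrets = jsonpickle.loads(json_data)
    self.password = self.secrets.get(self.password_key)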
def save(db, name, prefix="", method="pickle"):
    """
    Intelligent data save
    :param db: The object to save
    :param name: File name without extension
    :param prefix: extension prefix (file.prefix.extension)
    :param method: pickle | jsonpickle | json | pretty (pretty json)
    """
    if prefix != "":
        name += f".{prefix}"
    if method == "pickle":
        name += ".pickle"
    elif method == "json" or method == "jsonpickle":
        name += ".json"
    elif method == "pretty":
        name += ".pretty.json"
    else:
        raise ValueError(f"Unknown method {method}")
    logging.info(f"Save {name}")
    if method == "pickle":
        with open(name, "wb") as f:
            pickle.dump(db, f)
    else:
        with open(name, "w") as f:
            if method == "json" or method == "pretty":
                try:
                    json.dump(db, f, indent=4 if method == "pretty" else None)
                except TypeError:
                    s = jsonpickle.dumps(
                        db, unpicklable=False,
                        indent=4 if method == "pretty" else None)
                    f.write(s)
            else:
                s = jsonpickle.dumps(db, unpicklable=False, indent=4)
                f.write(s)
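# A minimal usage sketch for save(); Config is a hypothetical class used
# only for illustration:
class Config:
    def __init__(self):
        self.retries = 3
        self.hosts = ['a', 'b']

save({'x': 1}, 'plain', method='json')     # -> plain.json via json.dump
save(Config(), 'config', method='pretty')  # -> config.pretty.json via jsonpickle
save(Config(), 'config', prefix='v2')      # -> config.v2.pickle via pickle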
def get_statistics(self):
    """
    Get swiping statistics from database
    :return: swiping statistics as JSON
    """
    number_of_users_swiped = db.PotentialMatch.select().count()
    number_of_users_matched = db.PotentialMatch.select().where(
        db.PotentialMatch.matched == True).count()
    match_rate = 0 if number_of_users_swiped == 0 else float(
        number_of_users_matched) / number_of_users_swiped
    return jsonpickle.dumps({
        "swiped": number_of_users_swiped,
        "matched": number_of_users_matched,
        "match_rate": match_rate,
    })
def filter_and_format_para(paras):
    output = []
    keys_path = Path('keys.yml').resolve()
    keys_dict = parse_keys(keys_path)
    for i, para in enumerate(paras):
        para_str = ''.join([token.content for token in para[1]])
        for key_type, keys in keys_dict.items():
            for key in keys:
                idx = para_str.find(key)
                if idx >= 0:
                    output.append({'ex': para_str,
                                   'order': i,
                                   'type': key_type,
                                   'key': key,
                                   'pos': idx})
                    break
    output = unique_objects(output, 'ex')
    return jp.dumps(output)
def append(self, cache_key: str, objects: Union[Dict, Set[Any]]):
    """
    Add a set or dictionary of items to the existing cache. Must be the
    SAME type as what is stored in the existing cache under cache_key.
    Lists not supported due to issues with duplicates continually growing
    in the local cache :)

    :param cache_key: Key to append or merge items with
    :param objects: Objects to add.
    """
    if isinstance(objects, Set):
        objects = list(objects)
    if len(objects) > 0:
        log.debug(f'Appending {len(objects)} items to local cache: {objects}')
        contents: Dict = jsonpickle.decode(self.__read())
        cache = contents.get(cache_key, {})
        refresh_time = cache.get(self._LAST_REFRESH_KEY, 0)
        cache_obj = cache.get(self._STORE_KEY)
        if isinstance(cache_obj, Dict) and isinstance(objects, Dict) or cache_obj is None:
            if cache_obj:
                cache_obj.update(objects)
            else:
                cache_obj = objects
        elif isinstance(cache_obj, List) and isinstance(objects, List) or cache_obj is None:
            cache_obj = list(set(cache_obj + objects)) if cache_obj else objects
        else:
            raise RuntimeError(
                "Invalid state detected. Cache contains an invalid type that cannot be appended to, "
                "or the type provided does not match the type stored in the cache.")
        contents[cache_key] = {
            self._STORE_KEY: cache_obj,
            self._LAST_WRITE_KEY: Utils.millis_since_epoch(),
            self._LAST_REFRESH_KEY: refresh_time,
        }
        self.__write(jsonpickle.dumps(contents))
    else:
        log.info('No cached items found to add to cache.')
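# A hedged usage sketch for the delete()/append() pair above; the cache
# instance, key, and values are hypothetical:
cache.append('user_roles', {'alice': 'admin', 'bob': 'viewer'})
cache.delete('user_roles', {'bob'})  # removes the 'bob' key from the cached dict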
def forward_record(user, c):
    """
    Advance the user's current_game Record based on c (the current_game
    object).
    """
    # create a new record
    record = Record(status=jsonpickle.dumps(c),
                    time=timezone.now(),
                    player=user,
                    parent=user.Profile.current_game)
    record.save()
    # point the user's current_game at this new record
    profile = user.Profile
    profile.current_game = record
    profile.save()
def push_baonoi_restart_signal_to_queue(self, queue_name):
    # prepare data
    connection = self._connection
    body = jsonpickle.dumps({"signal": "restart"})
    # get the queue
    channel = connection.channel()
    queue_state = channel.queue_declare(queue_name, durable=True)
    # push the restart signal
    channel.basic_publish(exchange='', routing_key=queue_name, body=body)
def post(self, request):
    uname = request.POST.get('account', '')
    pwd = request.POST.get('password', '')
    red = request.POST.get('redirect', '')
    userList = UserInfo.objects.filter(uname=uname, pwd=pwd)
    if userList:
        request.session['user'] = jsonpickle.dumps(userList[0])
        if red == 'cart':
            # move the cart items from the session into the database
            SessionCartManager(request.session).migrateSession2DB()
            return HttpResponseRedirect('/cart/queryAll/')
        elif red == 'order':
            cartitems = request.POST.get('cartitems', '')
            return HttpResponseRedirect('/order/toOrder/?cartitems=' + cartitems)
        return HttpResponseRedirect('/user/center/')
    return HttpResponseRedirect('/user/login/')
def _get_encoded_attachments(self):
    attachments = self.get_attachments()
    new_attachments = []
    for attachment in attachments or []:
        if isinstance(attachment, File):
            # cannot use `with attachment.open()` since django 1.11
            # doesn't support that
            attachment.open()
            new_attachments.append((attachment.name, attachment.read(),
                                    guess_type(attachment.name)[0]))
            attachment.close()
        else:
            new_attachments.append(attachment)
    return jsonpickle.dumps(new_attachments)
def set_session_login(request):
    if request.method == 'GET':
        return render(request, 'set_session_login.html')
    else:
        # read the request parameters
        uname = request.POST.get('uname', '')
        pwd = request.POST.get('pwd', '')
        flag = request.POST.get('flag', '')
        # check whether the login succeeded
        if uname == 'zhangsan' and pwd == '123':
            request.session['login'] = uname
            user = UserCenter(uname, pwd)
            # serialize the object to a plain string and store it in the session
            request.session['login_'] = jsonpickle.dumps(user)
            return HttpResponseRedirect('/cookie_session/usercenter/')
        return HttpResponseRedirect('/cookie_session/set_session_login/')
def callback(ch, method, properties, body):
    try:
        print("{} - RabbitMQ message received - begin processing".format(hostname), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="RabbitMQ message received - begin processing")
        print(jsonpickle.loads(body))
        fileName = jsonpickle.loads(body)['fileName']
        imgHash = jsonpickle.loads(body)['imgHash']
        imgData = pickle.loads(codecs.decode(jsonpickle.loads(body)['imgData'].encode(), "base64"))

        redisNameToHash.set(fileName, imgHash)  # filename -> hash DB1
        print("Entry added to DB1 - redisNameToHash: <{}><{}>".format(fileName, imgHash), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                      body="Entry added to DB1 - redisNameToHash")
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="DB1: <{}><{}>".format(fileName, imgHash))

        redisHashToName.sadd(imgHash, fileName)  # hash -> [filename] DB2
        print("Entry added to DB2 - redisHashToName: <{}><{}>".format(imgHash, redisHashToName.smembers(imgHash)), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                      body="Entry added to DB2 - redisHashToName")
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="DB2: <{}><{}>".format(imgHash, redisHashToName.smembers(imgHash)))

        # Load the received image file
        img = face_recognition.load_image_file(imgData)
        # Get face encodings for any faces in the received image
        unknown_face_encodings = face_recognition.face_encodings(img)
        if len(unknown_face_encodings) > 0:
            for i in unknown_face_encodings:
                redisHashToFaceRec.sadd(imgHash, jsonpickle.dumps(i))  # hash -> [face encodings] DB3
                print("Entry added to DB3 - redisHashToFaceRec: <{}><{}>".format(imgHash, redisHashToFaceRec.smembers(imgHash)), file=sys.stderr)
                rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                              body="Entry added to DB3 - redisHashToFaceRec")
                rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                              body="DB3: <{}><{}>".format(imgHash, redisHashToFaceRec.smembers(imgHash)))
                for key in redisHashToFaceRec.scan_iter():
                    known_face_encodings = [jsonpickle.loads(j) for j in redisHashToFaceRec.smembers(key)]
                    if any(face_recognition.compare_faces(known_face_encodings, i)):
                        redisHashToHashSet.sadd(imgHash, key)  # hash -> [hashes] DB4
                        redisHashToHashSet.sadd(key, imgHash)  # hash -> [hashes] DB4
                        print("Entries added to DB4 - redisHashToHashSet: <{}><{}>, <{}><{}>".format(imgHash, redisHashToHashSet.smembers(imgHash), key, redisHashToHashSet.smembers(key)), file=sys.stderr)
                        rabbitMQChannel.basic_publish(exchange='logs', routing_key=info,
                                                      body="Entries added to DB4 - redisHashToHashSet")
                        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                                      body="DB4: <{}><{}>, <{}><{}>".format(imgHash, redisHashToHashSet.smembers(imgHash), key, redisHashToHashSet.smembers(key)))
        print("{} - RabbitMQ message processed and acknowledged".format(hostname), file=sys.stderr)
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="RabbitMQ message processed and acknowledged")
        ch.basic_ack(delivery_tag=method.delivery_tag)
    except Exception as e:
        # send negative ack on the channel passed to this callback
        # (the original referenced a global `channel` here, which is a bug)
        print("Error in callback: {}".format(e))
        rabbitMQChannel.basic_publish(exchange='logs', routing_key=debug,
                                      body="Error in callback: {}".format(e))
        ch.basic_nack(delivery_tag=method.delivery_tag)
def test_format_outputs(self):
    provider = VMDiagnosticsProvider()
    diag = provider.get_diagnostics(DiagnosticsOutputFormat.string)
    json.dumps(diag)
    diag = provider.get_diagnostics(DiagnosticsOutputFormat.json)
    json.dumps(diag)
    diag = provider.get_diagnostics(DiagnosticsOutputFormat.data)
    json.dumps(diag)
    with self.assertRaises(ValueError):
        provider.get_diagnostics("UNKNOWN FORMAT")