def loads(s, app=None, **kwargs):
    """Deserialize an object from a JSON-formatted string ``s``. If there is
    an app context pushed, use the current app's configured decoder
    (:attr:`~flask.Flask.json_decoder`), or fall back to the default
    :class:`JSONDecoder`.

    Takes the same arguments as the built-in :func:`json.loads`, and does some
    extra configuration based on the application. If the simplejson package is
    installed, it is preferred.

    :param s: JSON string to deserialize.
    :param app: App instance to use to configure the JSON decoder. Uses
        ``current_app`` if not given, and falls back to the default decoder
        when not in an app context.
    :param kwargs: Extra arguments passed to :func:`json.loads`.

    .. versionchanged:: 1.0.3
        ``app`` can be passed directly, rather than requiring an app context
        for configuration.
    """
    _load_arg_defaults(kwargs, app=app)
    if isinstance(s, bytes):
        encoding = kwargs.pop("encoding", None)
        if encoding is None:
            encoding = detect_encoding(s)
        s = s.decode(encoding)
    return _json.loads(s, **kwargs)
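A minimal usage sketch of the helper above, assuming it is ``flask.json.loads`` (consistent with the :attr:`~flask.Flask.json_decoder` reference, but not confirmed by the snippet alone) and assuming a Flask application object named ``app``:

    # Sketch only: assumes flask.json.loads and a Flask app named `app`.
    from flask import Flask
    from flask.json import loads

    app = Flask(__name__)

    # Passing the app directly (per the 1.0.3 changelog note above) avoids
    # needing a pushed app context.
    data = loads('{"value": 1}', app=app)
    assert data == {"value": 1}

    # Inside an app context, the current app's json_decoder is used, and
    # bytes are decoded via detect_encoding() before parsing.
    with app.app_context():
        data = loads(b'{"value": 2}')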
def get(self):
    b = Microwave.query.get_or_404(1)
    # return jsonify(microwave_schema(b))
    b_json = _json_.dumps(b, cls=JSONEncoder)
    print(type(b_json))  # str
    c_json = _json_.loads(b_json)
    print(type(c_json))  # dict
    return c_json
def get(self):
    self.get_data()
    self.all_data['data'] = self.data
    self.all_data['max_temperature'] = max(self.temp)
    self.all_data['min_temperature'] = min(self.temp)
    d_str = _json_.dumps(self.all_data, cls=JSONEncoder)
    d_dict = _json_.loads(d_str)
    print(type(d_dict))
    return d_dict
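Both resource methods above use the same dumps-then-loads round trip: serialize with a custom encoder, then parse the string back into plain dicts and lists that Flask-RESTful can return directly. A minimal sketch of that pattern as a standalone helper, using only the standard library; the helper name and ``encoder_cls`` parameter are illustrative, not taken from the snippets:

    import json

    def to_jsonable(obj, encoder_cls=json.JSONEncoder):
        """Serialize obj with a (possibly custom) encoder, then parse the
        result back into plain dicts/lists suitable as a response body."""
        return json.loads(json.dumps(obj, cls=encoder_cls))

    # e.g. return to_jsonable(self.all_data, encoder_cls=JSONEncoder)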
def loads(s, **kwargs):
    """Unserialize a JSON object from a string ``s`` by using the
    application's configured decoder (:attr:`~flask.Flask.json_decoder`)
    if there is an application on the stack.
    """
    _load_arg_defaults(kwargs)
    if isinstance(s, bytes):
        s = s.decode(kwargs.pop('encoding', None) or 'utf-8')
    return _json.loads(s, **kwargs)
def jsonloads(s, **kwargs):
    """Unserialize a JSON object from a string ``s`` by using the
    configured decoder.
    """
    kwargs.setdefault('cls', _JSONDecoder)
    if isinstance(s, bytes):
        encoding = kwargs.pop('encoding', None)
        if encoding is None:
            encoding = _detect_encoding(s)
        s = s.decode(encoding)
    return json.loads(s, **kwargs)
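A quick sketch of how the bytes branch above behaves, assuming ``_detect_encoding`` works like the standard library's ``json.detect_encoding`` and that ``_JSONDecoder`` behaves like the default ``json.JSONDecoder`` (both assumptions; the aliases are not shown here):

    # UTF-16 encoded bytes are detected and decoded before json.loads runs.
    payload = '{"x": 1}'.encode('utf-16')
    assert jsonloads(payload) == {"x": 1}

    # An explicit encoding keyword takes precedence over detection.
    assert jsonloads('{"x": 2}'.encode('utf-8'), encoding='utf-8') == {"x": 2}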
def loads(s, **kwargs):
    """Unserialize a JSON object from a string ``s``. If there is an
    application on the stack, use the application's configured decoder
    (:attr:`~flask.Flask.json_decoder`).
    """
    _load_arg_defaults(kwargs)
    if isinstance(s, bytes):
        encoding = kwargs.pop('encoding', None)
        if encoding is None:
            encoding = detect_encoding(s)
        s = s.decode(encoding)
    return _json.loads(s, **kwargs)
def loads(s, **kwargs):
    """Unserialize a JSON object from a string ``s`` by using the
    application's configured decoder (:attr:`~flask.Flask.json_decoder`)
    if there is an application on the stack.
    """
    _load_arg_defaults(kwargs)
    if isinstance(s, bytes):
        encoding = kwargs.pop("encoding", None)
        if encoding is None:
            encoding = detect_encoding(s)
        s = s.decode(encoding)
    return _json.loads(s, **kwargs)
def test_jsonify_date_types(self):
    test_dates = (datetime.datetime(1973, 3, 11, 6, 30, 45),
                  datetime.date(1975, 1, 5))
    app = Flak(__name__)
    c = app.test_client()
    for i, d in enumerate(test_dates):
        url = "/datetest{0}".format(i)
        f = lambda cx, val=d: cx.jsonify(x=val)
        app.add_url_rule(url, f, str(i))
        rv = c.get(url)
        assert rv.mimetype == "application/json"
        assert json.loads(rv.data)["x"] == http_date(d.timetuple())
def test_jsonify(self):
    d = dict(a=23, b=42, c=[1, 2, 3])
    app = Flak(__name__)

    @app.route("/kw")
    def return_kwargs(cx):
        return cx.jsonify(**d)

    @app.route("/dict")
    def return_dict(cx):
        return cx.jsonify(d)

    c = app.test_client()
    for url in "/kw", "/dict":
        rv = c.get(url)
        assert rv.mimetype == "application/json"
        assert json.loads(rv.data) == d
def test_get_list_params(self):
    a = requests_mock.Adapter()
    with self.obj as h:
        h.handle._session.mount('httpmock', a)
        filter, sort, pagination = (Filters(field1__lt=14),
                                    Sorts('-field2'),
                                    Pagination(offset=30, limit=10))
        test_data = {
            'objects': [
                {'some_id': 444, 'foo': 'bar'},
                {'some_id': 555, 'foo': 'baz'},
            ],
            'num_results': 2
        }
        test_params = {
            'model_name': "SomeModel1",
            'pk': 'some_id',
            'filters': filter,
            'sorts': sort,
            'pagination': pagination
        }
        check_data = {
            'q': h.build_q(filter, sort, pagination),
            'results_per_page': pagination.limit
        }
        uri = "{}/{}".format(
            self.test_config['name'],
            inflection.underscore(test_params['model_name']))
        a.register_uri('GET', uri, json=test_data)
        h.get_list(**test_params)
        chunks = urlparse(a.last_request.url)
        query_vars = parse_qs(chunks.query)
        assert all(
            json.loads(query_vars[k][0]) == v
            for k, v in check_data.items())
def get(self):
    # b = Microwave.query.get_or_404(1)
    # # return jsonify(microwave_schema(b))
    # b_json = _json_.dumps(b, cls=JSONEncoder)
    # print(type(b_json))  # str
    # c_json = _json_.loads(b_json)
    # print(type(c_json))  # dict
    # return c_json
    parser = reqparse.RequestParser()
    parser.add_argument('page', type=int, help='Page error')
    args = parser.parse_args()
    print(args)
    print(Microwave.query.first())
    a = Microwave.query.filter(
        Microwave.station_id == '54511',
        Microwave.datatype == '11').paginate(
            page=args["page"], per_page=2)
    a_items = a.items
    a_json = _json_.dumps(a_items, cls=JSONEncoder)
    a_jsons = _json_.loads(a_json)
    return a_jsons
def get(self):
    # b = Microwave.query.get_or_404(1)
    # # return jsonify(microwave_schema(b))
    # b_json = _json_.dumps(b, cls=JSONEncoder)
    # print(type(b_json))  # str
    # c_json = _json_.loads(b_json)
    # print(type(c_json))  # dict
    # return c_json
    parser = reqparse.RequestParser()
    parser.add_argument('time_down', type=str, help='Time_down error')
    parser.add_argument('time_up', type=str, help='Time_up error')
    args = parser.parse_args()
    print(args)
    print(Microwave.query.first())
    print(type(args['time_down']))
    a = Microwave.query.filter(
        Microwave.station_id == '54511',
        Microwave.datatype == '11',
        Microwave.datetime >= args['time_down'],
        Microwave.datetime <= args['time_up']).all()
    a_json = _json_.dumps(a, cls=JSONEncoder)
    a_jsons = _json_.loads(a_json)
    return a_jsons
def _loads(s, **kw):
    if isinstance(s, bytes):
        s = s.decode(kw.pop('encoding', None) or 'utf-8')
    return _json.loads(s, **kw)
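A small usage sketch for the ``_loads`` shim above, assuming ``_json`` is the standard library ``json`` module aliased at import time (an assumption; the import is not shown):

    # Bytes fall back to UTF-8 when no encoding keyword is supplied.
    assert _loads(b'{"ok": true}') == {"ok": True}
    assert _loads(b'{"ok": true}', encoding='utf-8') == {"ok": True}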
def root():
    global ALIST_PREFIX
    if ALIST_PREFIX is None:
        ALIST_PREFIX = current_app.config['CACHE_KEY_PREFIX'] + "alist_"

    if request.method == "GET":
        keys = ['timestamp', 'nonce', 'signature', 'echostr']
        fields = tuple(map(request.args.get, keys))
        if any(f is None for f in fields):
            abort(404)
        timestamp, nonce, signature, echostr = fields
        raw = ''.join(sorted([TOKEN, timestamp, nonce]))
        if xSHA1(raw) != signature:
            raise WxbotAuthFailed("Verification Error !")
        return echostr

    elif request.method == "POST":
        keys = ['signature', 'nonce', 'openid', 'timestamp']
        if any(k not in request.args for k in keys):
            abort(404)
        msg = receive.parse_message(request.stream.read())
        no_reply = 'success'

        if isinstance(msg, receive.Message):
            to_user = msg.FromUserName
            from_user = msg.ToUserName
            message_type = msg.MsgType
            if message_type == 'text':
                content = msg.Content.strip()
                if content == '[Unsupported Message]':
                    return no_reply

                ## syntax: Q [any]
                mat = re_Q.match(content)
                if mat is None:
                    return no_reply
                keyword = mat.group(1)
                keyword = keyword.strip() if keyword is not None else None
                if keyword is None or len(keyword) == 0:
                    return reply.TextMessage(to_user, from_user, text.Q_INTRO)

                ## syntax: Q [index]
                mat = re_index.match(keyword)
                if mat is not None:
                    ix = int(mat.group(1)) - 1
                    if ix < 0:
                        return reply.TextMessage(to_user, from_user, text.INDEX_OUT_OF_RANGE)
                    alist = RedisList(ALIST_PREFIX + to_user)
                    article = alist[ix]
                    if article is None:
                        return reply.TextMessage(to_user, from_user, text.INDEX_OUT_OF_RANGE)
                    alist.expires(ALIST_EXPIRES)
                    article = json.loads(article)
                    return reply.ArticleMessage(to_user, from_user, [article])

                ## syntax: Q [keywords]
                mat = re_date.match(keyword)
                if mat is None:
                    sbq = db.session.\
                        query(
                            Article.aid,
                            db.fts_match(
                                Article.ix_text,
                                keyword,
                                db.fts_match.BOOLEAN
                            ).label('score')
                        ).\
                        order_by(
                            db.desc('score'),
                            Article.masssend_time.desc(),
                            Article.idx.asc()
                        ).\
                        limit(SEARCH_RESULT_COUNT).\
                        subquery()
                    articles = db.session.\
                        query(
                            Article.title,
                            Article.digest,
                            Article.cover_url,
                            Article.content_url,
                        ).\
                        join(sbq, sbq.c.aid == Article.aid).\
                        order_by(
                            sbq.c.score.desc(),
                            Article.masssend_time.desc(),
                            Article.idx.asc()
                        ).\
                        all()
                    if len(articles) == 0:
                        return reply.TextMessage(to_user, from_user, text.NO_ARTICLE_MATCHED)
                    alist = RedisList(ALIST_PREFIX + to_user)
                    alist.clear()
                    if len(articles) == 1:
                        return reply.ArticleMessage(to_user, from_user, articles)
                    content = '\n'.join("(%d) %s" % (ix + 1, a.title)
                                        for (ix, a) in enumerate(articles))
                    articles = [json.dumps(a._asdict()) for a in articles]
                    alist.append(*articles)
                    alist.expires(ALIST_EXPIRES)
                    return reply.TextMessage(to_user, from_user, content)

                ## syntax: Q[YYMMDD] / Q[YYMM]
                if mat.group(1) is not None:
                    year, month, day = map(int, mat.group(1, 2, 3))
                elif mat.group(4) is not None:
                    year, month = map(int, mat.group(4, 5))
                    day = None
                else:
                    return no_reply
                try:
                    st, ed = get_time_range(year, month, day)
                except ValueError as e:
                    return reply.TextMessage(to_user, from_user, text.INVALID_DATE)

                ## syntax: Q [YYMMDD]
                if mat.group(1) is not None:
                    sbq = db.session.\
                        query(Article.aid).\
                        filter(Article.masssend_time.between(st, ed)).\
                        order_by(
                            Article.masssend_time.desc(),
                            Article.idx.asc()
                        ).\
                        subquery()
                    articles = db.session.\
                        query(
                            Article.title,
                            Article.digest,
                            Article.cover_url,
                            Article.content_url
                        ).\
                        join(sbq, sbq.c.aid == Article.aid).\
                        order_by(
                            Article.masssend_time.desc(),
                            Article.idx.asc()
                        ).\
                        all()
                    if len(articles) == 0:
                        return reply.TextMessage(to_user, from_user, text.NO_ARTICLE_ON_THIS_DAY)
                    alist = RedisList(ALIST_PREFIX + to_user)
                    alist.clear()
                    if len(articles) == 1:
                        return reply.ArticleMessage(to_user, from_user, articles)
                    content = '\n'.join("(%d) %s" % (ix + 1, a.title)
                                        for (ix, a) in enumerate(articles))
                    articles = [json.dumps(a._asdict()) for a in articles]
                    alist.append(*articles)
                    alist.expires(ALIST_EXPIRES)
                    return reply.TextMessage(to_user, from_user, content)

                ## syntax: Q [YYMM]
                else:
                    articles = db.session.\
                        query(
                            Article.title,
                            Article.masssend_time
                        ).\
                        filter(Article.masssend_time.between(st, ed)).\
                        order_by(
                            Article.masssend_time.desc(),
                            Article.idx.asc()
                        ).\
                        all()
                    if len(articles) == 0:
                        return reply.TextMessage(to_user, from_user, text.NO_ARTICLE_IN_THIS_MONTH)
                    articles = [
                        (time.strftime("%m-%d", time.localtime(a.masssend_time)), a.title)
                        for a in articles
                    ]
                    content = '\n'.join("%s %s" % a for a in articles)
                    return reply.TextMessage(to_user, from_user, content)

        elif isinstance(msg, receive.Event):
            to_user = msg.FromUserName
            from_user = msg.ToUserName
            event_type = msg.Event
            if event_type == 'subscribe':
                return reply.TextMessage(to_user, from_user, text.WELCOME)
            if event_type == 'CLICK':
                content = EVENT_REPLY_MAP.get(msg.EventKey)
                if content is None:
                    return no_reply
                return reply.TextMessage(to_user, from_user, content)

        return no_reply

    else:
        abort(405)
def __init__(self, *args, **kwargs):
    if args and isinstance(args[0], str):
        obj = dict(**json.loads(args[0]))
    else:
        obj = dict(*args, **kwargs)
    super().__init__(obj)
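A brief sketch of how this constructor can be exercised, assuming the class is named ``JsonDict`` and subclasses ``dict`` (consistent with the ``test_json_init`` test below, but an assumption about the class definition itself):

    # From a JSON string...
    d1 = JsonDict('{"a": 1, "b": [1, 2]}')
    # ...or with the usual dict constructor arguments.
    d2 = JsonDict(a=1, b=[1, 2])
    assert d1 == d2 == {"a": 1, "b": [1, 2]}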
def test_build_query_values(self, restrictions):
    res = self.obj.build_query(*restrictions)
    assert json.loads(res['q']) == self.obj.build_q(*restrictions) \
        and res['results_per_page'] == restrictions[2].limit
def test_json_init(self, initial_value):
    check_data = json.loads(initial_value)
    obj = JsonDict(initial_value)
    assert obj == check_data