def post(self, game_session):
    self.game_session = game_session
    game = sessions[game_session]
    uid = self.get_secure_cookie("uid")
    try:
        index = json.loads(self.get_argument("i", None))
    except:
        self.finish()
        return
    if not index:
        if not game["users"][uid]:
            game["user_connections"][uid] = self
            return
        self.finish({"updates": game["users"][uid] + ["reconnect"]})
        game["users"][uid] = []
        return
    if game["current_uid"] != uid:
        # ok update game state and send the status update for the other user
        # and keep this user in wait mode
        if game["state"][index[0]][index[1]] != False:
            self.finish({"updates": ["your move"]})
            return
        game["state"][index[0]][index[1]] = uid
        uid_other = game["current_uid"]
        game["user_connections"][uid] = self
        game["current_uid"] = uid
        try:
            game["user_connections"][uid_other].finish(
                {'updates': ["update " + json.dumps(index), "your move"]})
        except:
            game["users"][uid_other] += ["update " + json.dumps(index), "your move"]
        self.finish({"updates": ["wait", "reconnect"]})
        return
    self.finish({"updates": ["wait"]})
def runTest(self):
    normal = {'a': 'b', 'c': 'd'}
    self.assertEquals(yajl.dumps(normal), '{"a":"b","c":"d"}')
    if not is_python3():
        from tests import python2
        self.assertEquals(yajl.dumps(python2.IssueTwelveTest_dict), '{"a":"b","c":"d"}')
def magpie_to_json(self, var):
    """ One-time use. For converting MAgPIE nc4 to json. """
    # for crop in ['bmg', 'bmt', 'cas', 'mai', 'mgr', 'mil', 'nut', 'oth', 'pea', 'rap', 'ric', 'sgb', 'soy', 'sug', 'sun', 'whe']:
    for crop in ['whe', ]:
        d = netCDF4.Dataset(os.path.join(
            '..', 'data', 'netcdf', 'magpie',
            'MAgPIE_LUC_for_ATLAS_illustration_{}_agg.nc4'.format(crop)
        ))
        # for irr in ['firr', 'rainf']:
        #     d = netCDF4.Dataset(os.path.join('..', 'data', 'netcdf', 'magpie', 'MAgPIE_LUC_for_ATLAS_illustration_{}_{}.nc4'.format(crop, irr)))
        _v = d.variables['{}_gadm{}'.format(crop, self._adm)][:]
        _gi = d.variables['gadm{}_index'.format(self._adm)][:]
        new_data = {}
        for i in range(len(_gi)):
            new_data[str(_gi[i])] = [v - np.mean(_v[:20]) for v in _v[i].tolist()[20:]]
        with open('../static/json/aggr/gadm{}/magpie/{}_gadm{}.json'.format(
                self._adm, crop, self._adm), 'w') as f:
            f.write(
                json.dumps(
                    {
                        'data': new_data,
                        'min': round(np.min(_v[:]), 1),
                        'max': round(np.max(_v[:]), 1),
                    }
                )
            )
        trimmed = {
            'data': {k: np.array(v)[:, 0].tolist() for k, v in new_data.iteritems()},
            'min': round(np.min([np.array(v)[:, 0].tolist() for k, v in new_data.iteritems()]), 1),
            'max': round(np.max([np.array(v)[:, 0].tolist() for k, v in new_data.iteritems()]), 1),
        }
        with open('../static/json/aggr/gadm{}/magpie/{}_gadm{}_home.json'.format(self._adm, crop, self._adm), 'w') as f:
            f.write(json.dumps(trimmed))
def _load_friend(self, email, passwd):
    res = msn_friend_get(email, passwd)
    if res:
        invite_email_new(self.current_user_id, CID_MSN, res)
        return self.finish(jsonp(self, dumps({
            'error': False,
            'next': invite_user_id_by_cid(self.current_user_id, CID_MSN)
        })))
    else:
        # '邮箱或密码错误' = "incorrect email or password"
        return self.finish(jsonp(self, dumps({'error': '邮箱或密码错误'})))
def api(self, **kwargs):
    self.content_type = 'text/plain'
    if kwargs.get('encode'):
        return json.dumps(self.encode_json(**kwargs))
    if kwargs.get('decode'):
        return json.dumps(self.decode_json(**kwargs))
    return '{}'
def testStringEncoding(self):
    log('STRING ENCODE')
    # It should just raise with Unicode instance
    #print(yajl.dumps(u'abc\u0100def'))
    # It inserts \xff literally, OK I guess that's fine. It's not valid utf-8
    print(yajl.dumps('\x00\xff'))
    # mu character
    print(yajl.dumps('\xCE\xBC'))
def report_result(item):
    try:
        item_json = json.dumps(item)
    except UnicodeDecodeError:
        item['result'] = "FIXME: unicode decode error"
        item.pop('content')
        try:
            item_json = json.dumps(item)
        except UnicodeDecodeError:
            log.error("Fatal unicode decode error @ %s", item['url'])
            return
    request_manager('/report', 'PUT', item_json)
def merge():
    CACHE_PATH = "/home/work/wanfang/tag"
    for pos, i in enumerate(glob(CACHE_PATH + "/*")):
        for word, topic_freq in tofromfile.fromfile(i).iteritems():
            if len(word.strip()) <= 3:
                continue
            word = name_tidy(word)
            s = [word]
            for topic, freq in topic_freq.iteritems():
                topic = int(topic)
                s.append((topic, freq))
            print dumps(s)
def insert(title, tags, content, author, rating, url, reply_list, pic_list):
    key = url_key_builder(url)
    if not Spider.get(url_hash=key):
        entry = Spider()
        entry.title = title
        entry.tags = dumps(tags)
        entry.content = content
        entry.author = author
        entry.rating = rating
        entry.url_hash = key
        entry.url = url
        entry.reply_list = dumps(reply_list)
        entry.pic_list = dumps(pic_list)
        entry.save()
def collect(self, port):
    while True:
        req = port.read()
        if not req:
            break
        if req == "?":
            self.ggroup.map(self.pingall, self.telescreens.iterkeys())
            port.write(json.dumps(self.telescreens.keys()))
            continue
        if req == "*":
            want_list = self.telescreens.iterkeys()
        else:
            want_list = json.loads(req)
        rep = self.ggroup.map(self.fetch_state, want_list)
        port.write(json.dumps({k: v for k, v in rep if v}))
def testLong(self):
    ''' http://github.com/rtyler/py-yajl/issues#issue/10 '''
    if is_python3():
        return
    data = {long(1): 2}
    result = yajl.loads(yajl.dumps(data))
    self.assertEquals({'1': 2}, result)
def runTest(self):
    ''' http://github.com/rtyler/py-yajl/issues#issue/8 '''
    encoded = yajl.dumps([(2, 3,)])
    decoded = yajl.loads(encoded)
    self.assertEquals(len(decoded), 1)
    self.assertEquals(decoded[0][0], 2)
    self.assertEquals(decoded[0][1], 3)
async def get_handler(request):
    result = list()
    async with ClientSession() as session:
        for repository in config['repositories']:
            url = api_url + repository + '/pulls?access_token=' + token
            try:
                async with session.get(url) as response:
                    status = response.status
                    text = loads(await response.text())
            except ClientConnectorError:
                return web.Response(text='Checking ' + repository + ' failed due to a connection problem\n',
                                    status=502)
            except TimeoutError:
                return web.Response(text='Checking ' + repository + ' failed due to a timeout\n',
                                    status=504)
            except Exception as exception:
                error('check, %s', exception)
                return web.HTTPInternalServerError()
            if status != 200:
                return web.Response(text='Checking ' + repository + ' failed due to: ' + text['message'] + '\n',
                                    status=status)
            for item in text:
                if item['state'] != 'closed':
                    result.append(repository)
    if len(result) > 0:
        return web.Response(text=dumps({'message': 'Test failed', 'repositories': result}, indent=4))
    else:
        return web.HTTPOk()
def get(self, id):
    current_user_id = self.current_user_id
    result = None
    if not id.isdigit():
        id = id_by_url(id)
    if id:
        id = int(id)
    if not id or current_user_id == id:
        return self.finish('null')
    zsite = Zsite.mc_get(id)
    if zsite:
        career = career_current(id)
        career = filter(bool, career)
        current_user_id = self.current_user_id
        if current_user_id != id:
            if follow_get(current_user_id, id):
                word = '淡忘'  # "unfollow"
            else:
                word = '关注'  # "follow"
        result = [
            zsite.name,
            ' , '.join(career),
            ico_url_with_default(id),
            zsite.link,
            zsite.id,
            word,
            motto_get(zsite.id)
        ]
    return self.finish(dumps(result))
def post(self):
    result = None
    current_user_id = self.current_user_id
    txt = self.get_argument('txt', None)
    if txt:
        host = self.request.host
        zsite = zsite_by_domain(host)
        if zsite and zsite.cid == CID_SITE:
            zsite_id = zsite.id
        else:
            zsite_id = 0
        m = po_word_new(current_user_id, txt, zsite_id=zsite_id)
        if not zsite_id and m:
            c_dict = career_dict(set([current_user_id]))
            unit, title = c_dict[current_user_id]
            result = [
                [
                    1,
                    zsite.name,
                    zsite.link,
                    unit,
                    title,
                    pic_url_with_default(current_user_id, '219'),
                    [[m.id, [], 0, 61, 0, 0, 0, time.time(), None, txt_withlink(txt), False]]
                ],
                []
            ]
    self.finish(dumps(result))
def get(self, id):
    # data = {
    #     "zsite": {
    #         "name": "w",
    #         "unit": "xx",
    #         "title": "zz"
    #     },
    #     "name": "2011年第2次BPUG活动",
    #     "id": 1234,
    #     "fav": True,
    #     "reply_count": 1,
    #     "tag_id": 232,
    #     "tag_name": "sss"
    # }
    po = Po.mc_get(id)
    user = po.user
    result = [id]
    result.extend(feed_tuple_by_db(id))
    result.pop()
    result.pop()
    result.append(po.htm)
    zsite = [user.name, user.link]
    zsite.extend(career_current(po.user_id))
    result.append(zsite)
    self.finish(dumps(result))
def fetch(self, want="*"):
    if want == "*":
        req = "*"
    else:
        req = json.dumps(want)
    self.port.write(req)
    return json.loads(self.port.read())
async def handler(request):
    path = request.raw_path
    method = request.method
    reply = {'method': method, 'path': path}
    ip = request.headers.get('X-Forwarded-For')
    if ip:
        reply['ip'] = ip
    host = request.headers.get('Host')
    if host is None:
        host = request.headers.get('X-Forwarded-Host')
    if host:
        reply['host'] = host
    if tag:
        reply['tag'] = tag
    headers = dict()
    for header in ['X-Forwarded-Port', 'X-Forwarded-Proto', 'X-Forwarded-Agent',
                   'X-Forwarded-Request', 'X-Amzn-Trace-Id']:
        result = request.headers.get(header, None)
        if result:
            headers[header] = result
    if len(headers) != 0:
        reply['headers'] = headers
    return dumps(reply, indent=4)
async def download_one(pair, year, month, day, hour, session, sem):
    url = url_template.format(pair, year, month, day, hour)
    data = list()
    async with sem:
        async with session.get(url) as response:
            content = await response.read()
            if response.status not in http_ok:
                print(f'Scraping {url} failed due to the return code {response.status}')
                return
            if content == b'':
                print(f'Scraping {url} failed due to the empty content')
                return
            print(f'Scraping {url} succeeded')
    with lzma_open(BytesIO(content)) as f:
        while True:
            chunk = f.read(chunk_size)
            if chunk:
                data.append(unpack(fmt, chunk))
            else:
                break
    async with AIOFile(f'{store_path}/{pair}-{year}-{month}-{day}-{hour}.bi5', 'w') as fl:
        await fl.write(dumps(data, indent=4))
    return
def runTest(self):
    dumpable = [11889582081]
    rc = yajl.dumps(dumpable)
    self.assertEquals(rc, '[11889582081]')
    rc = yajl.loads(rc)
    self.assertEquals(rc, dumpable)
def post(self):
    sort_by_time, sort_by_country = Activity.time_country(self)
    self.render(
        dumps({
            'sort_by_time': sort_by_time,
            'sort_by_country': sort_by_country
        }))
def DeleteAllDatasetsFromOrg(organization, hdx_site, apikey, verbose=True):
    '''Delete all datasets owned by an organization.'''
    if verbose:
        print "--------------------------------------------------"
        print "//////////////////////////////////////////////////"
        print "--------------------------------------------------"
        print "////////////// DELETING DATASETS /////////////////"
        print "--------------------------------------------------"
        print "//////////////////////////////////////////////////"
        print "--------------------------------------------------"

    # Checking for input.
    if organization is None:
        print "No organization id provided. Please provide an organization id."
        print "--------------------------------------------------"
        return False

    # Base config.
    organization_show_url = hdx_site + '/api/action/organization_show?id='
    package_delete_url = hdx_site + '/api/action/package_delete'
    headers = {
        'X-CKAN-API-Key': apikey,
        'content-type': 'application/json'
    }

    # Fetching dataset information.
    dataset_dict = requests.get(organization_show_url + organization,
                                headers=headers,
                                auth=('dataproject', 'humdata')).json()

    #
    # Progress bar.
    #
    i = 0
    widgets = [I('prompt_bullet'), ' Deleting resources:',
               pb.Percentage(), ' ', pb.Bar('-'), ' ', pb.ETA(), ' ']
    pbar = pb.ProgressBar(widgets=widgets, maxval=len(dataset_dict)).start()

    #
    # Iterating over every dataset.
    #
    if dataset_dict["success"] is True:
        pbar.update(i)
        for dataset in dataset_dict["result"]["packages"]:
            u = {'id': dataset["id"]}
            r = requests.post(package_delete_url, data=json.dumps(u),
                              headers=headers, auth=('dataproject', 'humdata'))
            if r.status_code != 200:
                print "%s : %s" % (I('prompt_error'), dataset["name"])
            else:
                print "%s : %s" % (I('prompt_success'), dataset["name"])
            i += 1
    else:
        print "%s There was an error getting the dataset list." % I('prompt_error')
        print "--------------------------------------------------"
        return False
def _handle(self, environ, start_response):
    segments = filter(None, environ['PATH_INFO'].split('/'))
    params = cgi.parse_qs(
        environ.get('QUERY_STRING', ''), keep_blank_values=True)
    roomname = segments[0]
    username = environ['auth'][0]
    yak = client.Yakity(self._config, self._junction_client, username)
    if len(segments) == 1:
        if environ['REQUEST_METHOD'] == 'GET':
            last_seen = int(self._get_param('last', params) or 0)
            result = yak.read(roomname, last_seen)
            for r in result:
                r['timestamp'] = time.ctime(r['timestamp'] + timediff)
            return json.dumps(result)
        if environ['REQUEST_METHOD'] == 'POST':
            msg = environ['wsgi.input'].read()
            yak.say(roomname, msg)
            return '{"success":true}'
    if segments[1] == 'join':
        yak.join(roomname)
        return '{"success":true}'
    if segments[1] == 'depart':
        yak.depart(roomname)
        return '{"success":true}'
def _call(self, func_name, o):
    if func_name.startswith("_") or func_name in self.DISABLED:
        raise HTTPError(400)
    func = getattr(self, func_name, None)
    if func is None:
        raise HTTPError(501)
    if o is not None:
        o = loads(o)
        type_o = type(o)
        if type_o is list:
            r = func(*o)
        elif type_o is dict:
            r = func(**o)
        else:
            r = func(o)
    else:
        r = func()
    if r is None:
        chunk = '{}'
    elif isinstance(r, Err) and r:
        chunk = '{"err":%s}' % str(r)
    elif isinstance(r, JsOb):
        chunk = str(r)
    else:
        chunk = dumps(r)
    return chunk
def charged(out_trade_no, total_fee, rid, d):
    id, vid, ck = out_trade_no.split('_', 2)
    user_id, vcid = verifyed(vid, ck)
    if vcid == CID_VERIFY_MONEY:
        t = Trade.get(id)
        if t and t.to_id == user_id and t.rid == rid and t.value + t.tax == int(float(total_fee) * 100):
            if t.state == TRADE_STATE_ONWAY:
                trade_finish(t)
                trade_log.set(id, dumps(d))
                for_t = Trade.get(t.for_id)
                if for_t:
                    if bank_can_pay(for_t.from_id, for_t.value):
                        for_cid = for_t.cid
                        if for_cid == CID_TRADE_PAY:
                            trade_finish(for_t)
                            pay_notice(for_t.id)
                        elif for_cid == CID_TRADE_EVENT:
                            from event import event_joiner_state, event_joiner_new, EVENT_JOIN_STATE_NEW
                            event_id = for_t.rid
                            user_id = for_t.from_id
                            if event_joiner_state(event_id, user_id) < EVENT_JOIN_STATE_NEW:
                                trade_open(for_t)
                                event_joiner_new(event_id, user_id)
                    else:
                        trade_fail(for_t)
                        return t
                    return for_t
            return t
def main(args):
    try:
        ecr = client('ecr', region_name=args.region,
                     aws_access_key_id=args.key_id,
                     aws_secret_access_key=args.key)
    except exceptions.ClientError:
        raise
    images = ecr.list_images(registryId=args.registry,
                             repositoryName=args.repository,
                             filter={'tagStatus': 'UNTAGGED'})
    for image in images['imageIds']:
        res = ecr.batch_delete_image(registryId=args.registry,
                                     repositoryName=args.repository,
                                     imageIds=[{'imageDigest': image['imageDigest']}])
        print(dumps({
            'image': image['imageDigest'],
            'status': res['ResponseMetadata']['HTTPStatusCode']
        }, indent=4))
def Compile(obj):
    if isinstance(obj, str):
        assert obj.endswith('.json'), 'Config type not currently supported'
        obj = json.load(open(obj, 'r'))
    layers = []
    COMPONENTS = {}
    if 'IMPORTS' in obj:
        assert isinstance(obj['IMPORTS'], list), 'BLOX IMPORTS must be a list'
        for f in obj['IMPORTS']:
            if isinstance(f, str):
                if f.endswith('.blx') or f.endswith('.json'):
                    _obj = json.load(open(f, 'r'))
                    obj_name = _obj['Name'] if 'Name' in _obj else ''.join(f.split('.')[:-1])
                    COMPONENTS.update({obj_name: Compile(_obj)})
                else:
                    COMPONENTS.update(load(f))
                    # USER_DEFINED.update(load(f))
    if 'DEFS' in obj:
        for k, fs in obj['DEFS']['BLOX'].items():
            COMPONENTS.update({k: nn.Sequential(*[handlers[k](v) for f in fs for k, v in f.items()])})
        # for k, fs in obj['DEFS'].items():
        #     COMPONENTS.update({k: nn.Sequential(*[handlers[k](v) for f in fs for k, v in f.items()])})
    if 'BLOX' in obj:
        for layer in obj['BLOX']:
            if isinstance(layer, str):
                f = layer
                print(layer, json.dumps(list(PREDEFINED.keys()), indent=2))
                if f in COMPONENTS:
                    funcs = COMPONENTS[f]
                elif f in PREDEFINED:
                    funcs = PREDEFINED[f]()
                else:
                    raise ValueError('Function Block not defined in config file. Error @ {}'.format(f))
                layers.append(funcs)
            else:
                if 'MODULE' in layer:
                    layers.append(PREDEFINED[layer['MODULE']['LAYER']](**layer['MODULE']['ARGS'])
                                  if isinstance(layer['MODULE'], dict)
                                  else PREDEFINED[layer['MODULE']]())
                elif 'DEF' in layer:
                    f = layer['DEF']
                    # if the block is defined in another file, load and continue
                    if f.endswith('.json') or f.endswith('.blx'):
                        funcs = Compile(json.load(open(f, 'r')))
                    # check to see if the block is previously defined
                    elif f in COMPONENTS:
                        funcs = COMPONENTS[f]
                    elif f in PREDEFINED:
                        funcs = PREDEFINED[f]()
                    else:
                        raise ValueError('Function Block not defined in config file. Error @ {}'.format(f))
                    layers.append(funcs)
                elif 'REPEAT' in layer:
                    b = layer['REPEAT']['BLOCK']
                    t = layer['REPEAT']['REPS']
                    layers.append(nn.Sequential(*[c for _ in range(t) for c in COMPONENTS[b]])
                                  if isinstance(COMPONENTS[b], list)
                                  else COMPONENTS[b])
                else:
                    for k, v in layer.items():
                        layers.append(handlers[k](v))
    return nn.Sequential(*layers).cuda() if torch.cuda.is_available() else nn.Sequential(*layers)
def server(request):
    handler_map = urls.get(request.path)
    if not handler_map:
        # default behaviour is 404 to all unknown URLs
        raise webob.exc.HTTPNotFound()
    auth_error = check_auth(request)
    if auth_error:
        log.info("auth: " + auth_error)
        raise webob.exc.HTTPUnauthorized(auth_error)
    with manager_pool.item() as manager:
        handler = method_dispatcher(**dict(
            (method, getattr(manager, name))
            for method, name in handler_map.iteritems()
        ))
        result = handler(request)
    response = Response(json.dumps(result), content_type='application/json')
    if not response.etag and (200 <= response.status_int < 300):
        # generate Etag from URL and response.body
        sha256_tag = hashlib.sha256(request.path + response.body).digest()
        response.etag = b64encode(sha256_tag)
    if 'gzip' in request.accept_encoding and len(response.body) > MIN_COMPRESS_LENGTH:
        # client supports gzipped answer and response is reasonably long for compression benefits
        response.content_encoding = 'gzip'
        response.body = gzip_string(response.body)
    return response
def add_gadm1_codes_to_gadm1_json(self):
    """ Add GADM level 1 information to level 1 map outlines from GADM. """
    with open('../static/topojson/gadm1_map.json') as f:
        ga = json.loads(f.read())
    ga['objects']['regions'] = ga['objects']['_bejeezus']
    ga['objects'].pop('_bejeezus', None)
    for region in ga['objects']['regions']['geometries']:
        props = region['properties']
        try:
            region['properties']['iso'] = region['properties']['ISO']
            region['properties']['adm1'] = '{0:02d}'.format(region['properties']['ID_1'])
            region['properties']['adm0'] = '{0}'.format(region['properties']['ID_0'])
            region['properties']['adm'] = '{0}{1:02d}'.format(region['properties']['ID_0'], region['properties']['ID_1'])
            region['properties']['name'] = region['properties']['NAME_1']
            region['properties']['country'] = region['properties']['NAME']
            region['properties'].pop('ISO', None)
            region['properties'].pop('ID_1', None)
            region['properties'].pop('ID_0', None)
            region['properties'].pop('ENGTYPE_1', None)
            region['properties'].pop('TYPE_1', None)
            region['properties'].pop('NL_NAME_1', None)
            region['properties'].pop('VARNAME_1', None)
        except:
            pass
    with open('../static/topojson/atlas_gadm1.json', 'w') as f:
        f.write(json.dumps(ga))
    return ga
def add_gadm1_codes_to_ne1_json(self):
    """ Add GADM level 1 information to level 1 map outlines from Natural Earth. """
    gadm = pd.DataFrame.from_csv('./gadm1.meta.csv', index_col=4)
    gadm.index = np.arange(len(gadm))
    with open('../static/topojson/ne1_s0001.json') as f:
        ne = json.loads(f.read())
    for region in ne['objects']['regions']['geometries']:
        props = region['properties']
        try:
            country = pycountry.countries.get(alpha2=props['iso_a2'])
            region['properties']['iso'] = country.alpha3
            id0 = gadm.ix[gadm['ISO'] == country.alpha3].\
                ix[gadm['NAME'] == props['name'].encode('latin_1')].ID_0.values[0]
            id1 = gadm.ix[gadm['ISO'] == country.alpha3].\
                ix[gadm['NAME'] == props['name'].encode('latin_1')].ID_1.values[0]
            region['properties']['adm1'] = '{0:02d}'.format(id1)
            region['properties']['adm0'] = '{0}'.format(id0)
            region['properties']['adm'] = '{0}{1:02d}'.format(id0, id1)
        except:
            pass
    with open('../static/topojson/atlas_gadm1.json', 'w') as f:
        f.write(json.dumps(ne))
    return ne
def test_chinese(self):
    ''' Testing with simplified chinese for http://github.com/rtyler/py-yajl/issues/#issue/7 '''
    char = '\u65e9\u5b89, \u7238\u7238'
    if not is_python3():
        from tests import python2
        char = python2.IssueSevenTest_chinese_char
    out = yajl.dumps(char).lower()
    self.assertEquals(out, '"\\u65e9\\u5b89, \\u7238\\u7238"')
    out = yajl.dumps(out).lower()
    self.assertEquals(out, '"\\"\\\\u65e9\\\\u5b89, \\\\u7238\\\\u7238\\""')
    out = yajl.loads(out)
    self.assertEquals(out, '"\\u65e9\\u5b89, \\u7238\\u7238"')
    out = yajl.loads(out)
    self.assertEquals(out, char)
def finish(self, chunk={}):
    callback = self.get_argument('callback', None)
    if callback:
        if type(chunk) is dict:
            chunk = dumps(chunk)
        chunk = '%s(%s)' % (callback, chunk)
    self.set_header('Content-Type', 'application/json; charset=UTF-8')
    super(JsonErrView, self).finish(chunk)
def get(self):
    q = self.get_argument('q', None)
    if q:
        result = dumps(
            (int(i[0]), int(i[1]), escape(i[2]))
            for i in autocomplete_tag.id_rank_name_list_by_str(q)
        )
    else:
        result = []
    self.finish(jsonp(self, result))
def testDict(self):
    log('DICT')
    d = {"bool": False, "int": 42, "float": 3.14, "string": "s"}
    print(yajl.dumps(d))
    s = '{"bool": false, "int": 42, "float": 3.14, "string": "s"}'
    print(yajl.loads(s))
    log('')
def get(self):
    email = self.get_argument('email', None)
    passwd = self.get_argument('passwd', None)
    url = 'http://%s.%s' % (self.current_user_id, SITE_DOMAIN)
    if email and passwd:
        thread.start_new_thread(self._load_friend, (email, passwd))
    else:
        # '输入正确的邮箱和密码' = "please enter a valid email and password"
        return self.finish(jsonp(self, dumps({'error': '输入正确的邮箱和密码'})))
def testFloat(self):
    log('FLOAT')
    print(yajl.dumps(123.4))
    # Bug fix over latest version of py-yajl: a lone float decodes
    decoded = yajl.loads('123.4')
    self.assertEqual(123.4, decoded)
    log('')
def get(self):
    q = self.get_argument('q', None)
    if q:
        result = autocomplete(q)
        result = dumps(result)
    else:
        result = []
    self.finish(jsonp(self, result))
def _delete_old(old_array):
    #
    # Deleting old gallery items.
    #
    for result in old_array["result"]:
        u = {'id': result["id"]}
        requests.post(related_delete_url, data=json.dumps(u),
                      headers=headers, auth=('dataproject', 'humdata'))
        if verbose:
            print "%s Existing gallery item. Deleting. %s" % (I('prompt_warn'), result["id"])
def render(self, chunk):
    if chunk:
        if type(chunk) is dict:
            chunk = dumps(chunk)
        chunk = '{"err":%s}' % chunk
    else:
        chunk = '{}'
    self.set_header('Content-Type', 'application/json; charset=UTF-8')
    self.finish(chunk)
def add_test(v):
    # These modules have a few round-tripping problems...
    try:
        assert cjson.decode(cjson.encode(v)) == v
        assert yajl.loads(yajl.dumps(v)) == v
    except Exception:
        pass
    else:
        TESTS.append((v, tnetstring.dumps(v), cjson.encode(v)))
def post(self, id=0):
    # USER DUMPS FIX HEADER FOR FIREFOX
    if id:
        id = int(id)
    r = self._post(id)
    if isinstance(r, (int, long)):
        r = {'status': r}
    r = dumps(r)
    self.finish(r)
async def handle(request):
    reply = {
        "entities_url": "/v2/entities",
        "types_url": "/v2/types",
        "subscriptions_url": "/v2/subscriptions",
        "registrations_url": "/v2/registrations"
    }
    return web.Response(text=dumps(reply, indent=4),
                        content_type='application/json')
def dumps(self, data):
    # don't serialize strings
    if isinstance(data, string_types):
        return data
    try:
        return json.dumps(data, default=self.default)
    except (ValueError, TypeError) as e:
        raise SerializationError(data, e)
def dumps_cookiejar(jar):
    r = []
    for k in jar:
        v = {}
        for i in COOKIE_ATTR:
            s = getattr(k, i, None)
            if s is not None:
                v[i] = s
        r.append(v)
    return dumps(r)
def get(self, id):
    po = Po.mc_get(id)
    user_id = self.current_user_id
    result = []
    if po and po.can_view(user_id):
        for reply in po.reply_list():
            user = reply.user
            result.append((url_or_id(user.id), reply.htm, user.name))
    return self.finish(dumps(result))