def test_precise_float():
    f = "1.234567890E+34"
    f1 = "1.2345678900000002e+34"

    assert rapidjson.loads(f) == float(f)
    assert rapidjson.loads(f, precise_float=True) == float(f)
    assert rapidjson.loads(f, precise_float=False) == float(f1)

def test_uuid_mode():
    assert rapidjson.UUID_MODE_NONE == 0
    assert rapidjson.UUID_MODE_CANONICAL == 1
    assert rapidjson.UUID_MODE_HEX == 2

    value = uuid.uuid1()
    with pytest.raises(TypeError):
        rapidjson.dumps(value)

    with pytest.raises(ValueError):
        rapidjson.dumps(value, uuid_mode=42)

    with pytest.raises(ValueError):
        rapidjson.loads('""', uuid_mode=42)

    dumped = rapidjson.dumps(value, uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    loaded = rapidjson.loads(dumped, uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    assert loaded == value

    # When loading, hex mode implies canonical format
    loaded = rapidjson.loads(dumped, uuid_mode=rapidjson.UUID_MODE_HEX)
    assert loaded == value

    dumped = rapidjson.dumps(value, uuid_mode=rapidjson.UUID_MODE_HEX)
    loaded = rapidjson.loads(dumped, uuid_mode=rapidjson.UUID_MODE_HEX)
    assert loaded == value

def test_datetime_mode_loads():
    import pytz

    utc = datetime.now(pytz.utc)
    utcstr = utc.isoformat()

    jsond = rapidjson.dumps(utc, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert jsond == '"%s"' % utcstr
    assert rapidjson.loads(jsond, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == utc

    local = utc.astimezone(pytz.timezone('Europe/Rome'))
    locstr = local.isoformat()

    jsond = rapidjson.dumps(local, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert jsond == '"%s"' % locstr
    # without a datetime_mode the value stays a plain string
    assert rapidjson.loads(jsond) == locstr
    assert rapidjson.loads(jsond, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == local

    load_as_utc = rapidjson.loads(jsond, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_UTC)
    assert load_as_utc == utc
    assert not load_as_utc.utcoffset()

    load_as_naive = rapidjson.loads(jsond, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ)
    assert load_as_naive == local.replace(tzinfo=None)

def test_infinity():
    inf = float("inf")
    dumped = rapidjson.dumps(inf)
    loaded = rapidjson.loads(dumped)
    assert loaded == inf

    d = Decimal(inf)
    dumped = rapidjson.dumps(d, use_decimal=True)
    loaded = rapidjson.loads(dumped, use_decimal=True)
    assert loaded == inf

def read_prev(self):
    """
    Read the previous line from the file, parse and return.
    Returns None if out of lines.
    """
    original_pos = current_pos = self.file.tell()

    # can't fall off the beginning
    if current_pos == 0:
        return None

    # Rewind by chunk_size and read chunk_size bytes; repeat until we've
    # found enough "\n" markers to be sure the buffer holds the end of the
    # previous line plus the start of the line before it, then split and grab.
    rewound_chunk = b""
    while rewound_chunk.count(b"\n") < 3:  # changed from 2 to 3 to fix partial reads
        before_jump = current_pos
        # Jump backwards chunk_size bytes, and prevent falling off the start
        current_pos = max(0, current_pos - self.chunk_size)
        self.file.seek(current_pos)
        jumped_by = before_jump - current_pos
        # prepend the chunk to our buffer
        rewound_chunk = b''.join([self.file.read(jumped_by), rewound_chunk])
        # If we just read from the beginning of the file, break regardless
        if current_pos == 0:
            break

    # We have a chunk containing at least one full line.
    # Find the last line in the chunk:
    #   -1 => blank
    #   -2 => last line emitted
    #   -3 => previous line; won't exist if we hit BOF
    #   -4 and beyond => line before that and/or partial line garbage
    lines_split = rewound_chunk.split(b"\n")
    if len(lines_split) < 3:
        self.line = 0
        self.file.seek(0)
        return json.loads(self.decode(lines_split[0]))

    prev_line = lines_split[-2]

    # Calculate how far backwards we jumped, then seek to the beginning of
    # the line we're returning.
    # TODO: should this be elsewhere so that a following read_next call
    # doesn't return this line again?
    after_prev_line = lines_split[-1:]
    rewound_len = len(b"\n".join([prev_line] + after_prev_line))
    self.file.seek(original_pos - rewound_len)
    self.line -= 1
    return json.loads(self.decode(prev_line))

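# A minimal sketch of the reader object that read_prev above appears to hang
# off of. The attribute names (file, chunk_size, line) and the decode helper
# are inferred from the method body, not confirmed by the source.
class JsonLinesReader:
    def __init__(self, path, chunk_size=4096, encoding='utf-8'):
        # binary mode, since read_prev seeks by byte offsets and splits on b"\n"
        self.file = open(path, 'rb')
        self.chunk_size = chunk_size
        self.encoding = encoding
        self.line = 0  # current line index, kept in sync by the read methods

    def decode(self, raw):
        # raw bytes from the file are decoded before json.loads
        return raw.decode(self.encoding)
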
def test_datetime_mode_dumps():
    import pytz

    assert rapidjson.DATETIME_MODE_NONE == 0
    assert rapidjson.DATETIME_MODE_ISO8601 == 1
    assert rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ == 2
    assert rapidjson.DATETIME_MODE_ISO8601_UTC == 3

    d = datetime.utcnow()
    dstr = d.isoformat()

    with pytest.raises(TypeError):
        rapidjson.dumps(d)

    with pytest.raises(ValueError):
        rapidjson.dumps(d, datetime_mode=42)

    with pytest.raises(ValueError):
        rapidjson.loads('""', datetime_mode=42)

    with pytest.raises(TypeError):
        rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_NONE)

    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == '"%s"' % dstr
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ) == '"%s"' % dstr

    d = d.replace(tzinfo=pytz.utc)
    dstr = utcstr = d.isoformat()

    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == '"%s"' % dstr
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ) == '"%s"' % dstr[:-6]

    d = d.astimezone(pytz.timezone('Pacific/Chatham'))
    dstr = d.isoformat()

    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == '"%s"' % dstr
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ) == '"%s"' % dstr[:-6]

    d = d.astimezone(pytz.timezone('Asia/Kathmandu'))
    dstr = d.isoformat()

    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == '"%s"' % dstr
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ) == '"%s"' % dstr[:-6]

    d = d.astimezone(pytz.timezone('America/New_York'))
    dstr = d.isoformat()

    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601) == '"%s"' % dstr
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_IGNORE_TZ) == '"%s"' % dstr[:-6]
    assert rapidjson.dumps(d, datetime_mode=rapidjson.DATETIME_MODE_ISO8601_UTC) == '"%s"' % utcstr

def test_nan():
    nan = float("nan")
    dumped = rapidjson.dumps(nan)
    loaded = rapidjson.loads(dumped)
    assert math.isnan(nan)
    assert math.isnan(loaded)

    d = Decimal(nan)
    dumped = rapidjson.dumps(d, use_decimal=True)
    loaded = rapidjson.loads(dumped, use_decimal=True)
    assert math.isnan(d)
    assert math.isnan(loaded)

def getAddy(apn):
    # clean up the APN by removing dashes
    apn = apn.replace("-", "")
    # make the HTTP call and assign the response
    r = http.request('GET', url + apn)
    # pull the data out of the response
    data = r.data.decode('utf-8')
    # parse the JSON payload
    d = rapidjson.loads(data)

    # sometimes there is no data, so return (apn, 'NA', 0, 0)
    if len(d) < 1:
        print("\n", apn, ",NA", sep='')
        return (apn, "NA", 0, 0)

    # otherwise there's data
    a = d[0]                             # address info
    c = d[1]['geometry']['coordinates']  # coordinate list
    pt = c[0][0]                         # first coordinate
    if 'from_st' in a:
        s = (apn, a['from_st'] + " " + a['street'], pt[0], pt[1])
        print('.', end='', flush=True)
    else:
        s = (apn, "NA", 0, 0)
    return s

def test_unicode(self):
    text = '"は"'
    rapid_ret = rapidjson.loads(text)
    std_ret = json.loads(text)
    self.assertEqual(std_ret, u"は")
    self.assertEqual(rapid_ret, u"は")
    self.assertEqual(std_ret, rapid_ret)

def test_datetime_values(value):
    with pytest.raises(TypeError):
        rapidjson.dumps(value)

    dumped = rapidjson.dumps(value, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    loaded = rapidjson.loads(dumped, datetime_mode=rapidjson.DATETIME_MODE_ISO8601)
    assert loaded == value

def get_input_condition(bigchain, fulfillment):
    """
    Args:
        bigchain: the bigchain instance used to look up previous transactions
        fulfillment: the fulfillment dict whose input condition is wanted

    Returns:
        the condition the fulfillment's input points to
    """
    input_tx = fulfillment['input']

    # if `TRANSFER` transaction
    if input_tx:
        # get previous condition
        previous_tx = bigchain.get_transaction(input_tx['txid'])
        conditions = sorted(previous_tx['transaction']['conditions'],
                            key=lambda d: d['cid'])
        return conditions[input_tx['cid']]

    # if `CREATE` transaction
    # there is no previous transaction so we need to create one on the fly
    else:
        current_owner = fulfillment['current_owners'][0]
        condition = cc.Ed25519Fulfillment(public_key=current_owner)

        return {
            'condition': {
                'details': rapidjson.loads(condition.serialize_json()),
                'uri': condition.condition_uri
            }
        }

def main():
    """Entry point: read the project file and run training or prediction."""
    # Read command line arguments.
    args = process_command_line()

    # Read the project file
    projectFileLocation = args.project_file
    with open(projectFileLocation, 'r') as f:
        projectFileString = f.read()
    projectFile = rapidjson.loads(projectFileString)
    if isinstance(projectFile, float):
        raise RuntimeError("could not load project-file json")

    predictor = vp.VoxelPredict(projectFile)

    if args.modus == 'train':
        predictor.doTraining()
    else:
        if args.roiBegin is None or args.roiEnd is None:
            predictor.predict(dataPath=args.data, dataKey=args.key, outPath=args.out)
        else:
            roiBegin = args.roiBegin
            roiEnd = args.roiEnd
            print("ROI ", roiEnd, roiBegin)
            predictor.predictROI(dataPath=args.data, dataKey=args.key,
                                 outPath=args.out, roiBegin=roiBegin, roiEnd=roiEnd)

    return 0

def test_unicode():
    arabic = 'بينهم ان يكون مسلما رشيدا عاقلا ًوابنا شرعيا لابوين عمانيين'
    chinese = '本站所提供的資料和服務都不收費,因此網站所需要的資金全來自廣告及捐款。若您願意捐款補助'
    for text in [arabic, chinese]:
        dumped = rapidjson.dumps(text)
        loaded = rapidjson.loads(dumped)
        assert text == loaded

def test_doubles():
    for x in range(100000):
        d = sys.maxsize * random.random()
        dumped = rapidjson.dumps(d)
        loaded = rapidjson.loads(dumped)
        assert loaded == d

def read_next(self):
    """
    Read the next line from the file, parse and return.
    Returns None if out of lines.
    """
    data = self.file.readline().strip()
    if not data:
        return None
    self.line += 1
    return json.loads(self.decode(data))

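# Illustrative use of the read_next/read_prev pair, assuming they are attached
# to the JsonLinesReader sketch shown earlier and 'events.jsonl' is a JSON-lines
# file (both assumptions, not from the source). After stepping forward twice,
# read_prev should hand back the record just emitted and rewind to its start:
reader = JsonLinesReader('events.jsonl')
first = reader.read_next()
second = reader.read_next()
assert reader.read_prev() == second
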
def test_uuid_and_datetime_mode_together():
    value = [date.today(), uuid.uuid1()]

    dumped = rapidjson.dumps(value,
                             datetime_mode=rapidjson.DATETIME_MODE_ISO8601,
                             uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    loaded = rapidjson.loads(dumped,
                             datetime_mode=rapidjson.DATETIME_MODE_ISO8601,
                             uuid_mode=rapidjson.UUID_MODE_CANONICAL)
    assert loaded == value

def load_config():
    cf = uwsgi.opt.get('config-file')
    if not cf:
        print("Cannot find config file: {}".format(cf))
        exit(1)
    conf_data = json.loads(open(cf).read())
    app.logger.debug('loading config from {}'.format(cf.decode('ascii', 'ignore')))
    return Conf(**conf_data)

def test_use_decimal():
    import math
    from decimal import Decimal

    dstr = "2.7182818284590452353602874713527"
    d = Decimal(dstr)

    with pytest.raises(TypeError):
        rapidjson.dumps(d)

    assert rapidjson.dumps(float(dstr)) == str(math.e)
    assert rapidjson.dumps(d, use_decimal=True) == dstr
    assert rapidjson.dumps({"foo": d}, use_decimal=True) == '{"foo":%s}' % dstr

    assert rapidjson.loads(
        rapidjson.dumps(d, use_decimal=True),
        use_decimal=True) == d

    assert rapidjson.loads(rapidjson.dumps(d, use_decimal=True)) == float(dstr)

def deserialize(data):
    """Deserialize a JSON formatted string into a dict.

    Args:
        data (str): JSON formatted string.

    Returns:
        dict: dict resulting from the deserialization of a JSON formatted
        string.
    """
    return rapidjson.loads(data)

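# Hedged round-trip check for deserialize above, assuming a matching
# `serialize` helper built on rapidjson.dumps (not shown in this snippet):
def serialize(data):
    """Serialize a dict into a JSON formatted string."""
    return rapidjson.dumps(data)

assert deserialize(serialize({'height': 1, 'hash': 'abc'})) == {'height': 1, 'hash': 'abc'}
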
def test_object_hook():
    def as_complex(dct):
        if "__complex__" in dct:
            return complex(dct["real"], dct["imag"])
        return dct

    result = rapidjson.loads('{"__complex__": true, "real": 1, "imag": 2}',
                             object_hook=as_complex)
    assert result == (1 + 2j)

def test_object_hook():
    def as_complex(dct):
        if '__complex__' in dct:
            return complex(dct['real'], dct['imag'])
        return dct

    result = rapidjson.loads(
        '{"__complex__": true, "real": 1, "imag": 2}',
        object_hook=as_complex
    )
    assert result == (1+2j)

def test_object_hook():
    class Foo:
        def __init__(self, foo):
            self.foo = foo

    def hook(d):
        if 'foo' in d:
            return Foo(d['foo'])
        return d

    def default(obj):
        return {'foo': obj.foo}

    res = rapidjson.loads('{"foo": 1}', object_hook=hook)
    assert isinstance(res, Foo)
    assert res.foo == 1

    assert rapidjson.dumps(rapidjson.loads('{"foo": 1}', object_hook=hook),
                           default=default) == '{"foo":1}'

    res = rapidjson.loads(rapidjson.dumps(Foo(foo="bar"), default=default),
                          object_hook=hook)
    assert isinstance(res, Foo)
    assert res.foo == "bar"

def madera():
    # Download, if needed
    if not local_archive.exists():
        download(DATASET_URL, local_archive.name)

    # Unpack the archive
    if not local_data.exists():
        unpack(local_archive)

    geojson_file = Path(local_data, 'MADERA_COUNTY_PARCEL_LAYER.geojson')
    if not geojson_file.exists():
        shp_to_geojson(Path(local_data, 'MADERA_COUNTY_PARCEL_LAYER.shp'))

    data = rapidjson.loads(open(geojson_file, 'r').read())
    upload(build_documents(data))

def merced():
    # Download, if needed
    if not local_archive.exists():
        download(DATASET_URL, local_archive.name)

    # Unpack the archive
    if not local_data.exists():
        unpack(local_archive)

    geojson_file = Path(local_data, 'parcels.geojson')
    if not geojson_file.exists():
        shp_to_geojson(Path(local_data, 'parcels.shp'))

    data = rapidjson.loads(open(geojson_file, 'r').read())
    upload(build_documents(data))

async def phh(message):
    async with httpx.AsyncClient(http2=True) as http:
        fetch = await http.get(
            "https://api.github.com/repos/phhusson/treble_experimentations/releases/latest"
        )
        usr = json.loads(fetch.content)
        text = "<b>Phh's latest GSI release(s):</b>\n"
        for i in range(len(usr)):
            try:
                name = usr["assets"][i]["name"]
                url = usr["assets"][i]["browser_download_url"]
                text += f"<a href='{url}'>{name}</a>\n"
            except IndexError:
                continue
        await http.aclose()
    await message.reply(text)

def process_message_multistorage(
    message: Message[MultistorageKafkaPayload],
) -> MultistorageProcessedMessage:
    value = rapidjson.loads(message.payload.payload.value)
    metadata = KafkaMessageMetadata(
        message.offset, message.partition.index, message.timestamp
    )
    results: MutableSequence[
        Tuple[StorageKey, Union[None, BytesInsertBatch, ReplacementBatch]]
    ] = []

    for index, storage_key in enumerate(message.payload.storage_keys):
        result = _process_message_multistorage_work(
            metadata=metadata, storage_key=storage_key, storage_message=value
        )
        results.append((storage_key, result))

    return results

async def phh(c: Client, update: Update):
    chat_id = update.chat.id
    fetch = get(
        "https://api.github.com/repos/phhusson/treble_experimentations/releases/latest"
    )
    usr = json.loads(fetch.content)
    reply_text = tld(chat_id, "phh_releases")
    for i in range(len(usr)):
        try:
            name = usr['assets'][i]['name']
            url = usr['assets'][i]['browser_download_url']
            reply_text += f"[{name}]({url})\n"
        except IndexError:
            continue
    await update.reply_text(reply_text)

def wshandler(request):
    env, error = yield from get_env(request)
    if error:
        return error

    ws = web.WebSocketResponse()
    ws.start(request)
    request.app['sockets'].append((env.username, ws))

    session = request.cookies.get('session')
    while True:
        msg = yield from ws.receive()
        if msg.tp == web.MsgType.text:
            log.debug(msg.data)
            data = json.loads(msg.data)
            payload = data.get('payload')
            if payload:
                payload = json.dumps(payload)
            resp = yield from aiohttp.request(
                'POST' if payload else 'GET',
                env('host_web') + data['url'],
                headers={
                    'X-Requested-With': 'XMLHttpRequest',
                    'Cookie': data['cookie']
                },
                data=payload,
            )
            log.debug('%s %s', resp.status, msg.data)
            if resp.status == 200:
                p = (yield from resp.read()).decode()
                ws.send_str(json.dumps({'uid': data['uid'], 'payload': p}))
                new_session = resp.cookies.get('session')
                if new_session and session != new_session:
                    session = new_session.value
                    msg = {'session': new_session.output(header='').strip()}
                    ws.send_str(json.dumps(msg))
                    log.debug('sent new session')
        elif msg.tp == web.MsgType.close:
            log.debug('ws closed')
            yield from ws.close()
            break
        elif msg.tp == web.MsgType.error:
            log.exception(ws.exception())

    request.app['sockets'].remove((env.username, ws))
    return ws

def placer():
    # Download, if needed
    if not local_archive.exists():
        download(DATASET_URL, local_archive.name)

    # Unpack the archive
    if not local_data.exists():
        unpack(local_archive)

    geojson_file = Path(local_data, 'Parcels_Poly.geojson')
    if not geojson_file.exists():
        shp_to_geojson(Path(local_data, 'Parcels_Poly.shp'))

    data = rapidjson.loads(open(geojson_file, 'r').read())
    upload(build_documents(data))

def load():
    try:
        with open(eva.core.dir_runtime + '/uc_owfs.json') as fd:
            data = rapidjson.loads(fd.read())
        for p in data:
            d = p.copy()
            del d['id']
            del d['location']
            try:
                create_owfs_bus(p['id'], p['location'], **d)
            except Exception as e:
                logging.error(e)
    except:
        logging.error('unable to load uc_owfs.json')
        eva.core.log_traceback()
        return False
    return True

def ccr(images, URL, headers={"content-type": "application/json"}):
    batch_size = len(images)

    ccr_body = {
        "signature_name": "ccr",
        "instances": [
            # e.g. {"image_bytes": {"b64": image_content}}
        ]
    }

    # encode every image into the request body
    for i in images:
        ccr_body["instances"].append({"image_bytes": {"b64": image_encode(i)}})

    r = requests.post(URL, data=json.dumps(ccr_body), headers=headers)
    resp = json.loads(r.text)

    # ccr decode
    txt_list = list()
    for idx in range(batch_size):
        # an output flag (like ccr_dense_decoded) is omitted when TF Serving
        # returns only a single output
        response_ccr_str = [resp["predictions"][idx]]
        txt = ccr_decode(response_ccr_str)
        txt_list.append(txt)
    return txt_list

def rds_documentation(encounter, documentation_text, sql_driver=None):
    if not sql_driver:
        sql_driver = _build_sql_driver()
    documentation_text = rebuild_event(rapidjson.loads(documentation_text))
    logging.debug(f'after rebuilding, documentation_text is {documentation_text}')
    encounter_properties = encounter['vertex_properties']['local_properties']
    patient_id_value = _find_encounter_property('patient_id', encounter_properties)
    provider_id_value = _find_encounter_property('provider_id', encounter_properties)
    identifier = encounter['identifier']['property_value']
    logging.debug('going to resolve the provider and patient internal_id values')
    provider_internal_id, patient_internal_id = _resolve_internal_ids(
        identifier, provider_id_value, patient_id_value)
    logging.debug(
        f'resolved values are provider: {provider_internal_id}, patient: {patient_internal_id}')
    entry_kwargs = {
        'encounter_internal_id': encounter['internal_id'],
        'encounter_type': _find_encounter_property('encounter_type', encounter_properties),
        'id_source': _find_encounter_property('id_source', encounter_properties),
        'documentation_text': documentation_text['extracted_data']['source']['documentation_text'],
        'provider_internal_id': provider_internal_id,
        'patient_internal_id': patient_internal_id,
        'patient_id_value': patient_id_value,
        'provider_id_value': provider_id_value,
        'encounter_id_value': int(encounter['id_value']['property_value'])
    }
    text_entry = DocumentationTextEntry(**entry_kwargs)
    logging.debug(
        f'going to push the created documentation entry: {entry_kwargs} to the database')
    sql_driver.put_documentation(text_entry)
    logging.debug('successfully pushed the documentation to the database')

def f1(line):
    ss = line.strip().split("\t", 1)
    if len(ss) != 2:
        return [None]
    ts = ss[0]
    zhengwen = ss[1]

    # extract the JSONP body between "({" and "})"
    star = zhengwen.find("({")
    if star == -1:
        return [None]
    else:
        star += 1
    end = zhengwen.rfind("})") + 1

    text = zhengwen[star:end]
    text2 = text.replace(",]", "]")
    text3 = valid_jsontxt(text2)
    if text3 == '':
        return [None]
    ob = json.loads(text3)
    if not isinstance(ob, dict):
        return [None]
    if "auctions" not in ob:
        return [None]

    auctions = ob["auctions"]
    result = []
    for auction in auctions:
        lv = []
        item_id = auction["aid"]
        amount = auction["amount"]
        total = auction["total"]
        qu = auction["qu"]
        st = auction["st"]
        inSale = auction["inSale"]
        start = auction["start"]
        cp_flag = '2'  # copy flag, initial value is 2
        price = "-"
        lv.append(item_id)
        lv.append(price)
        lv.append(amount)
        lv.append(total)
        lv.append(qu)
        lv.append(st)
        lv.append(inSale)
        lv.append(start)
        lv.append(cp_flag)
        lv.append(ts)
        result.append(lv)
    return result

def get_inference_statistics(self, model_name, model_version="", headers=None, as_json=False):
    """Get the inference statistics for the specified model name and version.

    Parameters
    ----------
    model_name : str
        The name of the model to get statistics for.
    model_version: str
        The version of the model to get inference statistics. The default
        value is an empty string which means then the server will return
        the statistics of all available model versions.
    headers: dict
        Optional dictionary specifying additional HTTP headers to include
        in the request.
    as_json : bool
        If True then returns inference statistics as a json dict, otherwise
        as a protobuf message. Default value is False.

    Raises
    ------
    InferenceServerException
        If unable to get the model statistics.

    """
    if headers is not None:
        metadata = headers.items()
    else:
        metadata = ()
    try:
        request = grpc_service_v2_pb2.ModelStatisticsRequest(
            name=model_name, version=model_version)
        response = self._client_stub.ModelStatistics(request=request,
                                                     metadata=metadata)
        if as_json:
            return json.loads(MessageToJson(response))
        else:
            return response
    except grpc.RpcError as rpc_error:
        raise_error_grpc(rpc_error)

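# Hypothetical call against a running Triton server; the client construction
# and model name below are illustrative, not part of the snippet above:
# client = InferenceServerClient(url="localhost:8001")
stats = client.get_inference_statistics("resnet50", as_json=True)
# as_json=True yields a dict mirroring the ModelStatisticsResponse protobuf
for model in stats.get("model_stats", []):
    print(model.get("name"), model.get("version"))
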
def parse_cmt_new(line_s):
    line = valid_jsontxt(line_s)
    ts = line[:line.find('\t')]
    # extract the JSONP body
    json_txt = line.strip()[line.find('({"') + 1:-1]
    ob = json.loads(json_txt)
    if isinstance(ob, dict) and "data" in ob and "rateList" in ob["data"]:
        data = ob['data']
        list = []
        for value in data['rateList']:
            try:
                l = []
                itemid = value.get('auctionNumId', '-')
                int(itemid)
                l.append(itemid)
                l.append(value.get('auctionTitle', '-').replace('\t', ''))
                feedid = value.get('id', '-')
                int(feedid)
                l.append(feedid)
                userid = value.get('userId', '-')
                int(userid)
                l.append(userid)
                feedback = value.get('feedback', '-').replace('\t', '')
                l.append(valid_jsontxt(feedback))
                date = value.get('feedbackDate', '-').replace(".", "-")
                l.append(date)
                annoy = value.get('annoy', '-')
                l.append(annoy)
                l.append(ts)
                date = date[:10].replace('-', '')
                int(date)
                if len(date) != 8:
                    print("date is wrong, now is " + date)
                    continue
                list.append([itemid, [feedid, "\001".join(l)]])
            except Exception as e:
                print(e, line)
        return list

def f(line):
    ss = line.strip().split("\t", 2)
    if len(ss) != 3:
        return None
    item_id = ss[1]
    ts = ss[0]
    ob = json.loads(valid_jsontxt(ss[2]))
    if not isinstance(ob, dict):
        return None
    seller = ob.get('seller', {})
    shopId = seller.get('shopId', '-')
    if shopId != '68907524':
        return None
    itemInfoModel = ob.get('itemInfoModel', "-")
    if itemInfoModel == "-":
        return None
    title = itemInfoModel.get('title', '-').replace("\n", "")
    result = []
    result.append(item_id)
    result.append(title)
    result.append(ts)
    return (item_id, result)

def test_poll_exists(self):
    url = reverse('poll-data', args=[self.poll.slug])
    expected_data = {
        'config': {
            'equal_width': self.poll.equal_width,
            'options': [v.option.name for v in self.votes],
            'title': self.poll.title,
        },
        'votes': [
            [self.ballot.voter_name, [v.yes for v in self.votes]],
        ],
    }

    response = self.client.get(url)
    actual_data = json.loads(response.content)

    self.assertEqual(actual_data, expected_data)
    self.assertEqual(response.status_code, HTTPStatus.OK)

async def users(self, request: web.Request):
    cached = await RedisDB.instance().get("apiuserscache")
    if cached is not None:
        return web.json_response(data=json.loads(cached), dumps=json.dumps)
    # Not cached: get all of the users
    users = await User.all().prefetch_related('account')
    resp = []
    for user in users:
        resp.append({
            'user_id': user.id,
            'user_last_known_name': user.name,
            'address': user.account.address,
            'created_ts_utc': self.format_js_iso(user.created_at)
        })
    await RedisDB.instance().set("apiuserscache", json.dumps(resp), expires=1800)
    return web.json_response(data=resp, dumps=json.dumps)

def safe_load_json(json_string, json_file_key):
    """
    Load the passed string as a JSON object with exception handling and logging.
    Some OCW JSON content may be malformed.

    Args:
        json_string (str): The JSON contents as a string
        json_file_key (str or bytes): file ID for the JSON file

    Returns:
        JSON (dict): the JSON contents as JSON
    """
    try:
        loaded_json = rapidjson.loads(json_string)
        return loaded_json
    except rapidjson.JSONDecodeError:
        log.exception("%s has a corrupted JSON", json_file_key)
        return {}

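# Illustrative behavior of safe_load_json above; the file keys are made up:
assert safe_load_json('{"title": "18.06 Linear Algebra"}', 'course_1.json') == \
    {'title': '18.06 Linear Algebra'}
assert safe_load_json('{"title": unquoted}', 'broken.json') == {}  # logged, not raised
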
async def test_transaction_manager_decorator(
        dummy_coordinator, sample_repository, transaction_manager):
    uid = '8dce0040-8b72-4bef-b811-98723ea38583'
    sample = dict(id=uid, name='Second', size=20)

    TransactionCoordinator = transaction_manager(dummy_coordinator)
    await TransactionCoordinator(sample_repository).insert_sample(sample)

    connection_manager = transaction_manager.connection_manager
    connection_string = connection_manager.settings[0]['dsn']
    connection = await connect(connection_string)
    async with connection.transaction():
        result = await connection.fetch("SELECT data FROM origin.samples")

    assert len(result) == 2
    assert any(json.loads(item['data'])['id'] == uid for item in result)

def test_larger_structure():
    value = {
        'words': """
            Lorem ipsum dolor sit amet, consectetur adipiscing
            elit. Mauris adipiscing adipiscing placerat.
            Vestibulum augue augue,
            pellentesque quis sollicitudin id, adipiscing.
        """,
        'list': list(range(200)),
        'dict': dict((str(i), 'a') for i in list(range(200))),
        'int': 100100100,
        'float': 100999.123456
    }

    dumped = rj.dumps(value)
    loaded = rj.loads(dumped)
    assert loaded == value

def fun(line_s):
    line = valid_jsontxt(line_s)
    ob = json.loads(line)
    category_root = ob['from_category_root']
    brand = ob['from_brand']
    price = ob['price']
    comment_count = ob['comment_count']
    key = ob.get('key', '')
    title = ob['title']
    item_id = ob['item_id']
    category_final = ob['from_category_final']
    site = ob.get('site')
    rank = ob.get('rank')
    return [
        category_root, brand, comment_count, price, key, title,
        item_id, category_final, site, rank
    ]

def to_internal_value(self, value):
    if isinstance(value, str):
        try:
            value = json.loads(value)
        except ValueError:
            pass

    if isinstance(value, dict) and 'longitude' in value and 'latitude' in value:
        longitude, latitude = value['longitude'], value['latitude']
        if not self._check_number(longitude, (-180, 180)) \
                or not self._check_number(latitude, (-90, 90)):
            self.fail('invalid')
        value = Point(longitude, latitude)

    if not isinstance(value, Point):
        self.fail('invalid')

    return value

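# Hedged usage sketch: assuming to_internal_value above lives on a DRF-style
# serializer field (the PointField name here is hypothetical), a JSON string
# and an equivalent dict should both validate to the same Point:
field = PointField()
from_json = field.to_internal_value('{"longitude": 12.49, "latitude": 41.89}')
from_dict = field.to_internal_value({'longitude': 12.49, 'latitude': 41.89})
assert from_json == from_dict
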
def __init__(self, fname):
    self.timings = timings = {}
    with open(fname) as f:
        benchmark_data = rapidjson.loads(f.read())
    for bm in benchmark_data['benchmarks']:
        group = bm['group']
        desc = bm['name'].split('[', 1)[1][:-1]
        contender, name = desc.split('-')
        if contender == 'stdlib json':
            contender = 'json'
        if contender in CONTENDERS:
            bmark = Benchmark(group, name, contender)
            stats = bm['stats']
            timing = Timings(stats['min'], stats['max'], stats['mean'],
                             stats['rounds'], stats['median'])
            timings[bmark] = timing

def fun(line):
    ob = json.loads(valid_jsontxt(line))
    if not isinstance(ob, dict):
        return None
    if ob['count'] < 1:
        return None
    ls_cards = ob['cards']
    ls = []
    for cards in ls_cards:
        card_group_ls = cards['card_group']
        for card_group in card_group_ls:
            mblog = card_group['mblog']
            weibo_idstr = mblog['idstr']
            text = mblog['text']
            created_timestamp = mblog['created_timestamp']
            user = mblog['user']
            user_id = user['id']
            screen_name = user['screen_name']
            ls.append('\001'.join([
                str(i) for i in
                [user_id, screen_name, weibo_idstr, text, created_timestamp]
            ]))
    return ls

async def validate_session(self, key: str = None):
    try:
        result = await self.redis.get("{}:{}".format(SESSION_PREFIX, key))
        if not result:
            return False
        data = base64.b64decode(result)
        session_data = data.decode("utf-8").split(":", 1)
        user = rapidjson.loads(session_data[1])
        session = {
            "key": key,
            "session_id": session_data[0],
            self.user_property: user
        }
        return session
    except Exception as err:
        print(err)
        logging.debug("Django Session Decoding Error: {}".format(err))
        return False

def test_poll_exists(self): url = reverse("poll-data", args=[self.poll.slug]) expected_data = { "config": { "equal_width": self.poll.equal_width, "options": [v.option.name for v in self.votes], "title": self.poll.title, }, "votes": [ [self.ballot.voter_name, [v.choice for v in self.votes]], ], } response = self.client.get(url) actual_data = json.loads(response.content) self.assertEqual(actual_data, expected_data) self.assertEqual(response.status_code, HTTPStatus.OK)
async def test_transaction_manager_decorator_with_rollback(
        dummy_coordinator, sample_repository, transaction_manager):
    uid = '67adb3f3-736b-4811-a1a9-c41ff505c5a8'
    sample = dict(id=uid, name='Third', size=30)

    TransactionCoordinator = transaction_manager(dummy_coordinator)
    with raises(Exception):
        await TransactionCoordinator(sample_repository).failing_insert_sample(sample)

    connection_manager = transaction_manager.connection_manager
    connection_string = connection_manager.settings[0]['dsn']
    connection = await connect(connection_string)
    async with connection.transaction():
        result = await connection.fetch("SELECT data FROM origin.samples")

    assert not any(json.loads(item['data'])['id'] == uid for item in result)

def _parse_json(self, compression='default', **kwargs):
    id = self.id
    resolver = self.id_resolver
    if compression == 'default':
        compression = self.id_resolver.compression
    for k in self.args:
        if k not in kwargs:
            kwargs[k] = self.args[k]
    with resolver.open(id, **kwargs) as fin:
        rawjson = fin.read()
        if "object" in kwargs and kwargs['object'] == False:
            return rawjson
        if isinstance(rawjson, bytes):
            rawjson = rawjson.decode()
        return json.loads(rawjson)

async def update_fcm_token_for_account(account: str, token: str, r: web.Request, v2: bool = False):
    """Store device FCM registration tokens in redis"""
    redisInst = r.app['rdata']
    await set_or_upgrade_token_account_list(account, token, r, v2=v2)
    # Keep a list of tokens associated with this account
    cur_list = await redisInst.get(account)
    if cur_list is not None:
        cur_list = json.loads(cur_list.replace('\'', '"'))
    else:
        cur_list = {}
    if 'data' not in cur_list:
        cur_list['data'] = []
    if token not in cur_list['data']:
        cur_list['data'].append(token)
    await redisInst.set(account, json.dumps(cur_list))

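# Hypothetical aiohttp handler wiring for update_fcm_token_for_account above;
# the handler name, body fields, and response shape are illustrative:
async def register_device(request: web.Request):
    body = await request.json()
    await update_fcm_token_for_account(body['account'], body['fcm_token'],
                                       request, v2=True)
    return web.json_response({'ok': True})
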
async def loop(self):
    while not self.stop:
        try:
            rec = json.loads(await self.ws.recv())
            topic = rec.get("topic", None)
            if topic and topic == "confirmation":
                await self.arrival_cb(rec["message"])
            elif topic and topic == "active_difficulty":
                if "network_current" in rec["message"]:
                    WorkClient.instance().active_difficulty = rec["message"]["network_current"]
        except KeyboardInterrupt:
            break
        except websockets.exceptions.ConnectionClosed as e:
            log.server_logger.error(
                f"NANO WS: Connection closed to websocket. Code: {e.code} , reason: {e.reason}.")
            await self.reconnect_forever()
        except Exception as e:
            log.server_logger.critical(
                f"NANO WS: Unknown exception while handling getting a websocket message:\n{traceback.format_exc()}")
            await self.reconnect_forever()

def _get_data(self, next_id=None, slow=False):
    url = self.api_root
    if next_id:
        self.logger.info("Requesting next stash set: %s" % next_id)
        url += '?id=' + next_id
    else:
        self.logger.info("Requesting first stash set")
    req = self.rq_context.get(url)
    if slow:
        self.set_last_time()
    req.raise_for_status()
    self.logger.debug("Acquired stash data")
    data = json.loads(req.text)
    self.logger.debug("Loaded stash data from JSON")
    if 'next_change_id' not in data:
        raise KeyError(
            'next_change_id required field not present in response')
    return (data['stashes'], data['next_change_id'])

async def request_data(self):
    log.info("Requesting JSON data from minecraft-ids.grahamedgecombe.com")
    async with aiohttp.ClientSession() as session:
        async with session.get(McItems.url) as resp:
            raw_data = await resp.text()

    try:
        self.cache.set("raw_data", raw_data)
        self.cache.set("last_fetch", time.time())
        log.info("New mc dataset in cache")
        data = loads(raw_data)
        await self._parse(data)
    except JSONDecodeError as e:
        log.critical("Could not load JSON: {}".format(e))
        raise RuntimeError
    log.info("Done")

def test_larger_structure():
    value = {
        'words': """
            Lorem ipsum dolor sit amet, consectetur adipiscing
            elit. Mauris adipiscing adipiscing placerat.
            Vestibulum augue augue,
            pellentesque quis sollicitudin id, adipiscing.
        """,
        'list': list(range(200)),
        'dict': dict((str(i), 'a') for i in list(range(200))),
        'int': 100100100,
        'float': 100999.123456
    }

    dumped = rapidjson.dumps(value)
    loaded = rapidjson.loads(dumped)
    assert loaded == value

def pro_compress_line(line):
    ls = line.strip().split("\t")
    # flag: 0 - invalid, 1 - active, 2 - off the shelf
    flag = 0
    try:
        j = json.loads(valid_jsontxt(ls[2]))
        if "ret" in j and "ERRCODE_QUERY_DETAIL_FAIL" in j["ret"]:
            flag = 0
        elif "offShelfUrl" not in j["apiStack"]["itemControl"]["unitControl"]:
            flag = 1
        elif "offShelfUrl" in j["apiStack"]["itemControl"]["unitControl"]:
            flag = 2
        int(ls[1])
    except Exception as e:
        print(e, line.encode("utf-8"))
        return None

def get_model_metadata(self, model_name, model_version="", headers=None, query_params=None):
    """Contact the inference server and get the metadata for the specified model.

    Parameters
    ----------
    model_name: str
        The name of the model
    model_version: str
        The version of the model to get metadata. The default value is an
        empty string which means then the server will choose a version based
        on the model and internal policy.
    headers: dict
        Optional dictionary specifying additional HTTP headers to include
        in the request
    query_params: dict
        Optional url query parameters to use in network transaction

    Returns
    -------
    dict
        The JSON dict holding the metadata.

    Raises
    ------
    Exception
        If unable to get model metadata.

    """
    if not model_version:
        request_uri = "v2/models/{}".format(quote(model_name))
    else:
        request_uri = "v2/models/{}/versions/{}".format(
            quote(model_name), model_version)
    response = self._get(request_uri, headers, query_params)
    _raise_if_error(response)

    metadata = json.loads(response.read())
    return metadata

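# Hypothetical usage against a Triton HTTP/REST endpoint; the host, client
# construction, and model name are illustrative, not taken from the snippet:
# client = InferenceServerClient(url="localhost:8000")
meta = client.get_model_metadata("densenet_onnx")
print(meta["name"], meta["versions"], [inp["name"] for inp in meta["inputs"]])
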
def fun(line):
    if "\001" not in line:
        return None
    mid, s = line.split("\001")
    j = json.loads(valid_jsontxt(s))
    if not isinstance(j, dict):
        return None
    rs = []
    for wb in j['reposts']:
        if 'retweeted_status' not in wb:
            continue
        date_tz = utils.parsedate_tz(wb['created_at'])
        end_date = datetime(*date_tz[:6])
        date_tz = utils.parsedate_tz(wb['retweeted_status']['created_at'])
        start_date = datetime(*date_tz[:6])
        if start_date.year < 2015:
            continue
        t = int((end_date - start_date).total_seconds())
        name = wb['retweeted_status']['user']['screen_name']
        # strip the "转发微博" (repost) marker and newlines from the text
        txt = valid_jsontxt(wb['text']).replace('转发微博', '').replace('\n', '')
        if '//@' in txt:
            next_user_name = valid_jsontxt(txt).split('//@')[1].split(':')[0]
        else:
            next_user_name = ""
        wstr2 = '\001'.join([
            valid_jsontxt(i) for i in [
                mid, name, wb['retweeted_status']['user']['idstr'],
                wb['user']['screen_name'], wb['user']['idstr'],
                next_user_name, str(t), txt
            ]
        ])
        rs.append(wstr2)
    return rs

def store_bulk_transactions(self, transactions):
    txns = []
    assets = []
    txn_metadatas = []
    for t in transactions:
        transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
        if transaction['operation'] == t.CREATE:
            asset = transaction.pop('asset')
            asset['id'] = transaction['id']
            assets.append(asset)

        metadata = transaction.pop('metadata')
        txn_metadatas.append({'id': transaction['id'], 'metadata': metadata})
        txns.append(transaction)

    backend.query.store_metadatas(self.connection, txn_metadatas)
    if assets:
        backend.query.store_assets(self.connection, assets)
    return backend.query.store_transactions(self.connection, txns)

def test_base_values(value):
    dumped = rapidjson.dumps(value)
    loaded = rapidjson.loads(dumped)
    assert loaded == value

def test_constants():
    for c in [None, True, False]:
        assert rapidjson.loads(rapidjson.dumps(c)) is c
        assert rapidjson.loads(rapidjson.dumps([c]))[0] is c
        assert rapidjson.loads(rapidjson.dumps({'a': c}))['a'] is c