def _handle_op_command(self, msg_header, responseTo):
    """Parse an OP_COMMAND message and answer it with an OP_COMMANDREPLY."""
    op_command = MongoOpCommand.read_from_bytestream(
        self.rfile, msg_header.messageLength)
    print('OP_COMMAND:', dict(**op_command._asdict()))
    reply_doc = self._command_reply(op_command.commandName)
    print('OP_COMMANDREPLY:', reply_doc)
    # Reply document followed by an empty trailing document.
    payload = bson.dumps(reply_doc) + bson.dumps({})
    self._send_resp(OP_COMMANDREPLY, payload, responseTo)
def populate_decorations_list():
    """Scrape every decoration page and persist each one as a BSON file.

    Writes one ``<id>.bson`` per decoration under DECORATIONS_PATH plus an
    ``id_dict.bson`` index listing every scraped id.  Page-load timeouts are
    retried indefinitely; every 5th consecutive timeout the webdriver is
    recreated.
    """
    name_id_map = read_name_id_mapping()
    chrome_options = Options()
    chrome_options.add_argument('--headless')
    driver = webdriver.Chrome(chrome_options=chrome_options,
                              executable_path='./env/chromedriver')
    driver.set_page_load_timeout(WEBDRIVER_REQUEST_TIMEOUT)
    decoration_links = get_all_decoration_links()
    id_dict = {'ids': []}
    for link in decoration_links:
        attempts = 0
        while True:
            try:
                temp = process_decoration_data(link, driver, name_id_map)
            except TimeoutException:
                print('TimeoutException:', attempts)
                attempts += 1
                if attempts % 5 == 0:
                    # The driver may be wedged; replace it, quitting the old
                    # one first so browser processes are not leaked.
                    driver.quit()
                    driver = webdriver.Chrome(
                        chrome_options=chrome_options,
                        executable_path='./env/chromedriver')
                    driver.set_page_load_timeout(WEBDRIVER_REQUEST_TIMEOUT)
                continue
            break
        print(temp)
        # 'with' guarantees the file is closed even if dumps() raises.
        with open(DECORATIONS_PATH + str(temp['id']) + '.bson',
                  'wb') as dec_file:
            dec_file.write(bson.dumps(temp))
        id_dict['ids'].append(temp['id'])
    with open(DECORATIONS_PATH + 'id_dict.bson', 'wb') as id_file:
        id_file.write(bson.dumps(id_dict))
    print('Finished populating decorations')  # fixed typo: "populationg"
def pack(data):
    """Serialize *data* to BSON with an HMAC-SHA256 "mac" field appended.

    The digest is computed over the BSON encoding of the key-sorted payload,
    then written back into the document before the final encoding.
    """
    ordered = OrderedDict(sorted(data.items()))
    digest = hmac.new(get_secret(), bson.dumps(ordered), hashlib.sha256)
    ordered["mac"] = digest.hexdigest()
    return bson.dumps(ordered)
def test_parse_op_msg():
    """parse_op_msg must round-trip flag bits, body doc and doc sequences."""
    header_doc = {"insert": "test", "$db": "mydb",
                  "writeConcern": {"w": "majority"}}
    header_bytes = bson.dumps(header_doc)
    seq_ident = b"documents\x00"
    seq_doc = {"_id": "Document#1", "myvar": 42}
    seq_bytes = bson.dumps(seq_doc)
    flagbits = (0x02).to_bytes(4, byteorder="little", signed=False)  # MoreToCome bit
    section_len = len(seq_ident) + len(seq_bytes) + 4
    bodysize = section_len.to_bytes(4, byteorder="little", signed=False)
    packet = (flagbits + b"\x00" + header_bytes
              + b"\x01" + bodysize + seq_ident + seq_bytes)
    out_flagbits, out_header_doc, out_sections = parse_op_msg(packet)
    assert header_doc == out_header_doc
    assert {b"documents": [seq_doc]} == out_sections
    assert out_flagbits == 0x02
def create_op_msg(header: Dict[str, Any], request_id: int=0, response_to: int=0,
                  more_to_come=False, exhaust_allowed=False, **kwargs):
    """Build a complete OP_MSG wire-protocol packet.

    The body section (kind 0) carries *header*; each keyword argument becomes
    a document-sequence section (kind 1) named after the argument, containing
    the BSON-encoded docs.  Returns the full message bytes including the
    16-byte standard message header.
    """
    flag_bits = 0x00
    if more_to_come:
        # moreToCome is bit 1 (0x02); bit 0 is checksumPresent.
        flag_bits |= 0x02
    if exhaust_allowed:
        flag_bits |= 0x1_0000  # exhaustAllowed is bit 16
    flag_bits = struct.pack("<I", flag_bits)
    header = bson.dumps(header)
    # Section kind 0 is a single zero *byte*; the original wrote the four
    # ASCII characters b"0x00", corrupting the packet (the length accounting
    # below counts exactly one byte for it).
    data_fragments = [flag_bits, b"\x00", header]
    total_len = 4 + 1 + len(header)
    seq_header_struct = struct.Struct("<Bi")
    for name, docs in kwargs.items():
        data = [name.encode("ascii") + b"\0"] + [bson.dumps(doc) for doc in docs]
        # Section length includes its own 4-byte length field.
        section_len = sum(len(d) for d in data) + 4
        seq_header = seq_header_struct.pack(0x01, section_len)
        data_fragments.extend([seq_header] + data)
        total_len += section_len + 1  # +1 for the section kind byte
    total_len += 16  # standard message header
    data_fragments = [struct.pack("<iiii", total_len, request_id,
                                  response_to, OP_MSG)] + data_fragments
    return b"".join(data_fragments)
def send_message(self, msg):
    """Sign, encrypt and send *msg* over the connected socket.

    Pipeline: build the payload, BSON-encode it, RSA-PSS/SHA-256 sign the
    exact serialized bytes, wrap {meta_payload, signature} in BSON, then
    encrypt that under a fresh 16-byte IV with the symmetric key and send
    the BSON-encoded {iv, encrypted_bytes} envelope.
    """
    build_payload = self.build_message(msg)
    build_bytes = bson.dumps(build_payload)
    # Sign the serialized bytes so the receiver can verify them verbatim.
    sig = self.priv_key.sign(
        build_bytes,
        padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                    salt_length=padding.PSS.MAX_LENGTH),
        hashes.SHA256())
    sig_payload = dict()
    sig_payload['meta_payload'] = build_bytes
    sig_payload['signature'] = sig
    message = dict()
    iv = self.rng(16)  # fresh random IV per message
    message['iv'] = iv
    enc_payload = bson.dumps(sig_payload)
    # NOTE(review): encrypt(iv, data, None) matches an AEAD interface such as
    # cryptography's AESGCM (nonce, data, associated_data) -- confirm.
    message['encrypted_bytes'] = self.symmetric_key.encrypt(
        iv, enc_payload, None)
    message_bytes = bson.dumps(message)
    self.s.sendall(message_bytes)
def test():
    '''Testing BsonNetwork Protocol'''
    import sys
    import util
    import bson
    import logging
    from util.test import testFactory
    from echo import BsonNetworkEchoProtocol, flipMessage

    parser = util.arg_parser('usage: %prog [options]', logging=logging.WARNING)
    options, args = parser.parse_args()

    fmt = '[%(asctime)s][%(levelname)8s] %(message)s'
    logging.basicConfig(level=options.logging, format=fmt)

    factory = BsonNetworkFactory('echoer', options)
    factory.logging = logging
    factory.protocol = BsonNetworkEchoProtocol

    send = [{'_src': 'client', '_dst': 'echoer', 'h': 'd'}
            for i in range(0, 64)]
    # map() returns a lazy iterator on Python 3; materialize it so the
    # list-only insert() below works on both Python 2 and 3.
    recv = list(map(flipMessage, send))
    send.insert(0, {'_src': 'client'})
    recv.insert(0, {'_src': 'echoer'})

    # Pair each outgoing message with its expected echo, BSON-encoded.
    data = []
    for i in range(0, len(send)):
        data.append((bson.dumps(send[i]), bson.dumps(recv[i])))
    testFactory(factory, data)
def detectZones():
    '''construct a summary of the location of people in an image

    Args:
        zones: list of bounding boxes where each bounding box of a zone is in
            shape [[x_min, y_min], [x_max, y_max]]
    '''
    endpoint = conf['image_api_host'] + '/zone_detection'
    params = {
        'zones': dumps(request.json['zones']),
        'img_name': request.json['image'],
        'interest_objs': dumps(request.json['interest_objs']),
        'blur_objs': dumps(request.json['blur_objs']),
    }
    try:
        resp = requests.get(endpoint, params=params)
        if resp.status_code == 200:
            return make_response(jsonify(resp.json()), 200)
        return make_response(jsonify({'error': "Please check post data"}),
                             resp.status_code)
    except Exception as err:
        err.status_code = 503
        exceptions(err)
        return make_response(
            jsonify({'error': "Exception connecting image server"}), 503)
def examplesource_add_3_integer_bson():
    """Yield endless (operands, expected-sum) example pairs, BSON-encoded.

    Each item is a two-element list: a BSON document with three random
    integers under keys 0..2, and a BSON document with their sum under key 0.
    """
    while True:
        operands = [random.randint(0, 1000000000) for _ in range(3)]
        total = sum(operands)
        yield [bson.dumps(dict(enumerate(operands))),
               bson.dumps({0: total})]
def _create_game(self, num_of_players, clients):
    """Create a game for *clients*, perform the JOIN_GAME handshake with
    each, and register the game; aborts and disconnects everyone when any
    client answers the handshake incorrectly."""
    # Evenly spaced spawn x-positions for 2/3/4-player games.
    start_positions = {2: [], 3: [], 4: []}
    for key, arr in start_positions.items():
        arr.extend([int(i * (MAP_W / key)) for i in range(key)])
    game_map = Map(MAP_W, MAP_H, True)
    game = {
        'clients': [],
        'positions': start_positions[num_of_players],
        'positions_y': [],
        'mutex': Lock(),
        'num_of_players': num_of_players,
        'collision_map': game_map.collision_noise,
        'health': [100 for x in range(num_of_players)],
        'last_explode_time': 0.0,
        'map': game_map,
        'clock': 0.0,
        'finished': False,
        'turn': 0
    }
    for client in clients:
        game['clients'].append(client)
        # Collision map is zlib-compressed (level 7) to keep packets small.
        join_game_req = {
            'action': REQUEST.JOIN_GAME,
            'map': zlib.compress(bson.dumps({'data': game['collision_map']}), 7),
            'tank_id': clients.index(client),
            'start_x': start_positions[num_of_players]
        }
        client.send(bson.dumps(join_game_req))
    game['map'].create_map(game['collision_map'])
    # Every client must answer with JOIN_GAME or the game is aborted.
    client_invalid_request = False
    for client in clients:
        data = client.recv()
        if data['action'] != REQUEST.JOIN_GAME:
            client_invalid_request = True
    # Derive spawn heights from the terrain at each spawn x.
    for pos_x in game['positions']:
        game['positions_y'].append(game['map'].get_height(pos_x))
    if client_invalid_request:
        for client in clients:
            self.clients_lock.acquire()
            self.clients.remove(client)
            self.clients_lock.release()
            client.close()
        return
    self.games.append(game)
    for x in range(num_of_players):
        self.game_players[clients[x]] = self.games[len(self.games) - 1]
    self._send_change_turn_req(game)
def _handle_op_command(self, msg_header, responseTo):
    """Handle a legacy OP_COMMAND request: decode it, build the command
    reply, and write it back as an OP_COMMANDREPLY."""
    command = MongoOpCommand.read_from_bytestream(
        self.rfile, msg_header.messageLength)
    print('OP_COMMAND:', dict(**command._asdict()))
    reply = self._command_reply(command.commandName)
    print('OP_COMMANDREPLY:', reply)
    body = bson.dumps(reply)
    body += bson.dumps({})  # trailing empty document
    self._send_resp(OP_COMMANDREPLY, body, responseTo)
def test_int(self):
    """The good request round-trips; the bad one raises on encode/decode."""
    round_tripped = loads(dumps(self.good_request_dict))
    self.assertEqual(round_tripped, self.good_request_dict)
    with self.assertRaises(Exception):
        loads(dumps(self.bad_request_dict))
def encode_request(self, req):
    """BSON-encode a request as header document + body document.

    Non-dict bodies are wrapped under WRAPPED_FIELD so they can ride inside
    a BSON document.  Any failure is re-raised as GoRpcError.
    """
    try:
        if not isinstance(req.body, dict):
            # hack to handle simple values
            body = {WRAPPED_FIELD: req.body}
        else:
            body = req.body
        return bson.dumps(req.header) + bson.dumps(body)
    except Exception as e:  # 'except E, e' is Python-2-only syntax
        raise GoRpcError('encode error', e)
def encode_request(self, req):
    """Serialize *req* (header + body) to BSON, wrapping scalar bodies."""
    try:
        body = (req.body if isinstance(req.body, dict)
                else {WRAPPED_FIELD: req.body})  # hack to handle simple values
        return bson.dumps(req.header) + bson.dumps(body)
    except Exception as e:
        raise gorpc.GoRpcError('encode error', e)
def parse(self, response) -> Iterator:
    """Scrapy callback: index the page's sentence vectors and image vectors,
    then follow every anchor on the page."""
    webPageVector = getWebpageMeanVector(response)
    if webPageVector[1] != "":  # skip pages with no extracted body text
        webPageSummary = webPageVector[0]
        url = response.request.url
        ImageDBTransaction = images.begin(write=True)
        # Root-relative img srcs are resolved against the page URL path.
        for id, imageLink in enumerate([
                str(urljoin(url, urlparse(url).path) + imageHTMLTagSource)
                if imageHTMLTagSource.startswith("/") else imageHTMLTagSource
                for imageHTMLTagSource in response.xpath(
                    "//img/@src").extract()
        ]):
            try:
                # Store image vector + mean word vector + URL per image.
                ImageDBTransaction.put(
                    encodeURLAsNumber(imageLink, ":image:" + str(id)),
                    bson.dumps({
                        "image_vec":
                        getSentenceMeanVector(
                            tagImage(imageLink, vocabularyPickle)).tostring(),
                        "word_vec":
                        np.array([
                            wordVector for wordVector in webPageSummary[0]
                            if type(wordVector) is np.ndarray
                        ]).mean(axis=0).tostring(),
                        "url": imageLink,
                    }))
            except Exception as e:
                print(e)
        ImageDBTransaction.commit()
        for id, vector in enumerate(webPageSummary[0]):
            webPageDomain: str = response.xpath(
                "//meta[@property='og:site_name']/@content").extract_first()
            if webPageDomain:
                webPageDomain = webPageDomain
            else:
                # Fall back to the registered domain from the URL.
                webPageDomain: str = tldextract.extract(url).domain.upper()
            FUTURE.addElementToIndex(
                encodeURLAsNumber(url, id),
                bson.dumps({
                    "vec": np.array(vector).tostring(),
                    "vec_id": id,
                    "language": webPageVector[2],
                    "sentence": webPageSummary[1][id],
                    "url": url,
                    "domain": webPageDomain,
                    "body": webPageVector[1],
                }))
    for href in response.css("a::attr(href)"):
        yield response.follow(href, self.parse)
def sync():
    """Every second, snapshot `locallist` and `globallist` to log files.

    Runs forever; intended to be launched in a background thread.
    """
    global locallist
    global globallist
    while True:
        time.sleep(1)
        # bson.dumps returns bytes, so the files must be opened in binary
        # mode ('wb'); the original's text mode ('w') fails on Python 3.
        with open('LocLog', 'wb') as f:
            f.write(bson.dumps(locallist))
        with open('GlobLog', 'wb') as f:
            f.write(bson.dumps(globallist))
def _as_bytes(self) -> bytes:
    """Serialize this update message to its wire format: reserved int32,
    collection cstring, flags int32, selector doc, update doc."""
    reserved = int.to_bytes(0, length=4, byteorder='little')
    flags = int.to_bytes(self.flags, length=4, byteorder='little',
                         signed=False)
    parts = [
        reserved,
        bson.encode_cstring(self.full_collection_name),
        flags,
        bson.dumps(self.selector),
        bson.dumps(self.update),
    ]
    return b''.join(parts)
def _handle_connection(self, num_of_players):
    """Client connection thread: join a lobby, complete the game handshake,
    build local game state, then pump server packets until the game ends.
    Any failure drops back to the main menu."""
    try:
        self.s = GameSocket(AF_INET, SOCK_STREAM)
        self.s.connect(
            (self.cfg['server']['ip'], self.cfg['server']['port']))
        self.ui.get_widget('status_text').text.string = 'Joining lobby...'
        join_lobby_req = {
            'action': REQUEST.JOIN_LOBBY,
            'num_of_players': num_of_players
        }
        self.s.send(bson.dumps(join_lobby_req))
        self.ui.get_widget(
            'status_text').text.string = 'Waiting for players...'
        data = self.s.recv()
        # Anything but a JOIN_GAME reply means the handshake failed.
        if data['action'] != REQUEST.JOIN_GAME:
            self.s.close()
            self.ui.set_scene('main_menu')
            self.state = STATE.IN_MENU
            return
        # The map arrives zlib-compressed inside a BSON envelope.
        data['map'] = bson.loads(zlib.decompress(data['map']))['data']
        self.ui.get_widget(
            'status_text').text.string = 'Reconstructing map...'
        self.map.create_map(data['map'])
        self.tank_id = data['tank_id']
        # Our own tank is player-controlled; others mirror network state.
        self.tanks = [
            PlayerTank(x + 1, self.map, data['start_x'][x])
            if x == self.tank_id else NetworkPlayerTank(
                x + 1, self.map, data['start_x'][x])
            for x in range(num_of_players)
        ]
        join_game_req = {'action': REQUEST.JOIN_GAME}
        self.projectiles = []
        self.s.send(bson.dumps(join_game_req))
        self.turn = GameTurn()
        self.ui.set_scene('in_game')
        self.state = STATE.IN_GAME
        self.finished = False
        self.timer.restart()
        # Receive loop: process packets until the game is over.
        while not self.finished:
            data = self.s.recv()
            self._process_packet(data)
    except Exception as e:
        print(f'connecting to server failed {e}')
        self.s.close()
        self.ui.set_scene('main_menu')
        self.state = STATE.IN_MENU
def import_skeleton(self, other, skeleton_name):
    """Copy a legacy skeleton from *other* into our skeleton table,
    bz2-compressing both the unity-format data and its model metadata."""
    skeleton = other.load_skeleton_legacy(skeleton_name)
    if skeleton is None:
        return
    data_blob = bz2.compress(bson.dumps(skeleton.to_unity_format()))
    meta_blob = bz2.compress(bson.dumps(json.loads(skeleton.skeleton_model)))
    self.insert_records(self.skeleton_table,
                        ["name", "data", "metaData"],
                        [[skeleton_name, data_blob, meta_blob]])
def populate_weapons_list():
    """Scrape every weapon page, write one BSON file per weapon under the
    blademaster/ or gunner/ subdirectory, and finish with an id_dict.bson
    index of ids per weapon class.

    Page-load timeouts retry forever; every 5th consecutive timeout the
    webdriver is recreated (quitting the old one to avoid leaking browsers).
    """
    name_id_map = read_name_id_mapping()
    chrome_options = Options()
    chrome_options.add_argument('--headless')
    driver = webdriver.Chrome(chrome_options=chrome_options,
                              executable_path='./env/chromedriver')
    driver.set_page_load_timeout(WEBDRIVER_REQUEST_TIMEOUT)
    weapon_links = get_all_weapon_links()
    links = []
    for w in weapon_links.values():
        links += w
    bm_weapons = ['Great Sword', 'Long Sword', 'Sword', 'Dual Blades',
                  'Hammer', 'Hunting Horn', 'Lance', 'Gunlance',
                  'Switch Axe', 'Charge Blade', 'Insect Glaive']
    g_weapons = ['Bow', 'Light Bowgun', 'Heavy Bowgun']
    id_dict = {'Blademaster': [], 'Gunner': []}
    for k in links:
        attempts = 0
        while True:
            try:
                temp = process_weapon_data(k, driver, name_id_map,
                                           bm_weapons, g_weapons)
            except TimeoutException:
                print('TimeoutException: ', attempts)
                attempts += 1
                if attempts % 5 == 0:
                    driver.quit()  # don't leak the wedged browser
                    driver = webdriver.Chrome(
                        chrome_options=chrome_options,
                        executable_path='./env/chromedriver')
                    driver.set_page_load_timeout(WEBDRIVER_REQUEST_TIMEOUT)
                continue
            break
        print(temp)
        if temp['Weapon_Family'] in bm_weapons:
            subdir, class_key = 'blademaster/', 'Blademaster'
        elif temp['Weapon_Family'] in g_weapons:
            subdir, class_key = 'gunner/', 'Gunner'
        else:
            print('Not in a Weapon Family!!!!')
            return
        with open(WEAPONS_PATH + subdir + str(temp['id']) + '.bson',
                  'wb') as weapon_file:
            weapon_file.write(bson.dumps(temp))
        # Store only the id: the original appended the whole record by
        # mistake (its own "oops" comment), unlike the decorations index.
        id_dict[class_key].append(temp['id'])
    print('Populating complete')
    with open(WEAPONS_PATH + 'id_dict.bson', 'wb') as id_file:
        id_file.write(bson.dumps(id_dict))
    print('ID dict written')
def send(self, socket):
    """ Sends the message over the socket. """
    try:
        setup_msg = ""
        if self.master_setup_data_message != None:
            setup_msg = self.master_setup_data_message.dump()
        control_msg = ""
        if self.master_control_data_message != None:
            control_msg = self.master_control_data_message.dump()
        BaseMessage._send_with_destination_and_delimiter(
            self, socket,
            self.reply_id_tag,
            bson.dumps(self.settings_dict),
            self.sender_dealer_id_tag,
            str(self.sender_master_flag),
            setup_msg,
            control_msg,
            str(self.master_synchronization_failure_flag),
            str(self.ping_back_success_flag))
    except:
        raise ExceptionFormatter.get_full_exception()
def _recvobj(self, clientid, doc):
    '''Receives bson document `doc` via socket `clientid`.'''
    # print() calls run on both Python 2 and 3 (the original used
    # Python-2-only print statements); dead commented-out recv removed.
    print(len(bson.dumps(doc)))
    res = self.socks[clientid].recvobj()
    print(res)
    return res
def get_public_text(self):
    """Generate the publishable certificate text.

    Serializes the certificate ID, basic info, fingerprint hashes over
    several algorithms, and any attached signatures.
    """
    baseinfo = self.get_baseinfo()
    # Removed an unused local (`hash_source = hashable_obj(baseinfo)`)
    # that was computed but never read.
    # Fingerprint the certificate with several hash algorithms.
    hashes = []
    for algoname in ['SHA512', 'SHA1', 'SHA256', 'MD5', 'WHIRLPOOL']:
        hashes.append({'Algorithm': algoname,
                       'Hash': self.get_hash(algoname)})
    # Collect attached signatures, if any.
    sigs = []
    if self.signatures:
        for sig in self.signatures:
            sigs.append(sig)
    j = {
        'ID': self.get_id(),
        'Title': 'Xi_Certificate',
        'Basic': baseinfo,
        'Finger_Print': hashes,
        'Signatures': sigs,
    }
    return serializer.dumps(j)
def sign_certificate(self, pubcert, trustlevel=0, life=0x9E3400,
                     cert_hashalgo='whirlpool', raw=False):
    """Sign *pubcert* with this certificate via do_sign.

    Defaults: trust level 0, validity ~120 days.  The signature record is
    appended to pubcert.signatures and returned (raw dict or serialized).
    """
    # XXX verify this really is UTC
    nowtime = time.time() + time.timezone
    rawinfo = {
        'Title': 'New_Signature',
        'Certified_ID': pubcert.get_id(),
        'Issuer_ID': self.get_id(),
        'Issue_UTC': int(nowtime),
        'Valid_To': int(nowtime + life),
        'Trust_Level': int(trustlevel),
        'Cert_Hash_Algorithm': cert_hashalgo,
        'Cert_Digest': pubcert.get_hash(cert_hashalgo),
    }
    log.info('Signing Certificate: Subject[%s] TrustLevel[%s] ValidTo[%s]',
             rawinfo['Certified_ID'], rawinfo['Trust_Level'],
             rawinfo['Valid_To'])
    sig = self.do_sign(hashable_obj(rawinfo), raw=True)
    ret = {"Content": rawinfo, "Signature": sig}
    # Record the new signature on the signed certificate itself.
    pubcert.signatures.append(ret)
    return ret if raw else serializer.dumps(ret)
def do_sign(self, message, raw=True):
    """Sign *message* with every private key held by this certificate.

    Returns a dict keyed by 1-based key index (serialized via
    serializer.dumps unless raw=True).  Raises when this is not a private
    certificate or when no suitable hash algorithm exists for a key.
    """
    if not self.is_ours:
        raise Exception(
            "This is not a private certificate that can be used for signing.")
    ret = {}
    keyindex = 1
    for key in self.keys:
        signer = signature.signature(key.get_privatekey())
        signlimit = key.sign_limit()
        hashalgo = Hash().consult(signlimit)
        if len(hashalgo) < 1:
            raise Exception("No suitable hash functions found.")
        maxhash = hashalgo[max(hashalgo.keys())]
        # XXX security concern: the algorithm is chosen at random; consider
        # offering the caller a choice instead.
        choosenalgo = maxhash[random.randint(0, len(maxhash) - 1)]
        sig = signer.new(message, choosenalgo, raw)
        ret[str(keyindex)] = sig
        keyindex += 1
    # Bug fix: log *before* returning -- the original placed this after the
    # return statements, making it unreachable.
    log.info("Successfully made a sign.")
    if raw:
        return ret
    return serializer.dumps(ret)
def file_payload(name):
    """Build the BSON 'file' payload for *name* (files only in this version)."""
    contents = pack.read_file(name)
    return bson.dumps({'type': 'file', 'name': name, 'data': contents})
def test_random_tree(self):
    """Randomly populated nested structures survive a dumps/loads round
    trip."""
    for i in range(0, 16):  # range(): xrange is Python-2-only
        p = {}
        populate(p, 256, 4)
        sp = dumps(p)
        p2 = loads(sp)
        # assertEqual: assertEquals is a deprecated alias
        self.assertEqual(p, p2)
def __init__(self, keystr):
    """Load a public or private key (RSA or EC) from a serialized string
    or an already-parsed dict.

    Raises Exception when the key type is unrecognized or loading fails.
    """
    try:
        if type(keystr) == str:
            j = serializer.loads(keystr)
        else:
            j = keystr
        # Dispatch on the declared key type; '*_Private_Key' variants mark
        # the certificate as holding private material.
        if j['type'] == 'RSA_Public_Key':
            self.key = _RSA()
            self.is_private_key = False
        elif j['type'] == 'EC_Public_Key':
            self.key = _EC()
            self.is_private_key = False
        elif j['type'] == 'RSA_Private_Key':
            self.key = _RSA()
            self.is_private_key = True
        elif j['type'] == 'EC_Private_Key':
            self.key = _EC()
            self.is_private_key = True
        else:
            raise Exception("Unrecognized type of public key.")
        # Re-serialize so the loader always receives canonical text.
        keystr = serializer.dumps(j)
        if self.is_private_key:
            self.key.load_privatekey(keystr)
        else:
            self.key.load_publickey(keystr)
    except Exception as e:  # 'except E, e' is Python-2-only syntax
        raise Exception("Failed initilizing PublicKeyAlgorithm: %s" % e)
def encrypt(self, message, encryptor):
    """ECIES-style encryption: derive a shared secret from an ephemeral EC
    key and the recipient's public key, then delegate to *encryptor*.

    Returns the serialized {type, public_key, ciphertext} envelope, or
    False when the recipient's public key or curve is unknown.
    """
    if self._pubkey == None or self._pubkey_curve == None:
        # To send message via PublicKey, We must know it's curve.
        return False
    # Ephemeral key pair on the same curve; ECDH yields the shared secret.
    tempkey = EC.gen_params(self._pubkey_curve)
    tempkey.gen_key()
    sharedsecret = tempkey.compute_dh_key(self._pubkey)
    log.debug("Length of key is: %d", (len(sharedsecret) * 8))
    # Encrypt the message with the caller-supplied symmetric encryptor.
    ciphertext = encryptor(sharedsecret, message)
    # Export the ephemeral public key so the peer can derive the secret.
    membuff = BIO.MemoryBuffer()
    tempkey.save_pub_key_bio(membuff)
    publickey = membuff.read_all()  # open(filename).read()
    # Return with serializer.
    ret = serializer.dumps(
        {
            'type': 'EC_Encrypted',
            'public_key': publickey,
            'ciphertext': ciphertext,
        }
    )
    return ret
def test_auth():
    """Authenticate against /api/auth and stash the admin token globally."""
    global admin_token
    url = base_url + "/api/auth"
    payload = bson.dumps({
        "email": "*****@*****.**",
        "password": "******",
    })
    headers = {
        'Content-Type': "application/json",
        'cache-control': "no-cache",
        'Postman-Token': "4a0a5983-fb41-4353-81b6-aaa6d2a42d44",
    }
    response = requests.request("GET", url, data=payload, headers=headers)
    print(response, response.text)
    if response.status_code == 200:
        data = bson.loads(response.content)
        admin_token = data["token"]
        print(type(admin_token))
        pprint(data)
def write_signature(self):
    """Persist this object's signature to license.bs in the system
    directory.

    Restores the caller's working directory even if writing fails.
    """
    old_directory = os.getcwd()
    enter_in_system_directory()
    try:
        with open("license.bs", "wb") as file:
            dictionary = {"signature": self.sign, "key": ""}
            file.write(bson.dumps(dictionary))
    finally:
        # Always restore the previous cwd, even on write errors.
        os.chdir(old_directory)
async def handler_fn(request):
    """Accept the websocket, push the streaming payload once, then close."""
    ws = await request.accept()
    payload = bson.dumps({'data': streaming_data})
    await ws.send_message(payload)
    await ws.aclose(reason='EOF')
def _endpoint_thread(endpoint_func: Callable, clients: List[socket.socket],
                     semaphore: Semaphore, pose: PoseProvider,
                     tracker_args: Dict):
    """Continuously poll *endpoint_func*, transform the tracks into the
    current pose frame, and broadcast length-prefixed BSON packets to all
    clients.  Clients whose sends fail are dropped from the list.
    """
    while True:
        # update tracking info
        tracking_info = _transform_tracks(
            tracking_info=endpoint_func(tracker_args),
            pose=pose.get_pose())
        if tracking_info is None:
            continue
        bson_data = bson.dumps(tracking_info)
        # Network-byte-order int32 length prefix framing.
        data = struct.pack("!i", len(bson_data)) + bson_data
        # update clients
        semaphore.acquire()
        try:
            to_remove = []
            for client in clients:
                try:
                    client.sendall(data)
                except Exception:
                    logger.debug("transmit failure", exc_info=True)
                    to_remove.append(client)
            for sock in to_remove:
                clients.remove(sock)
        finally:
            # Guarantee release even if list mutation raises; otherwise the
            # semaphore would be held forever and all senders would block.
            semaphore.release()
def save(self, out, format): """ """ # # Serialize # if format == "WKT": if "wkt" in self.content["crs"]: out.write(self.content["crs"]["wkt"]) else: out.write(_sref_4326().ExportToWkt()) return if format in ("GeoJSON", "GeoBSON", "GeoAMF"): content = self.content if "wkt" in content["crs"]: content["crs"] = {"type": "link", "properties": {"href": "0.wkt", "type": "ogcwkt"}} else: del content["crs"] elif format in ("ArcJSON", "ArcBSON", "ArcAMF"): content = reserialize_to_arc(self.content, format == "ArcAMF") else: raise KnownUnknown( 'Vector response only saves .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % format ) # # Encode # if format in ("GeoJSON", "ArcJSON"): indent = self.verbose and 2 or None encoded = JSONEncoder(indent=indent).iterencode(content) float_pat = compile(r"^-?\d+\.\d+$") for atom in encoded: if float_pat.match(atom): out.write("%.6f" % float(atom)) else: out.write(atom) elif format in ("GeoBSON", "ArcBSON"): import bson encoded = bson.dumps(content) out.write(encoded) elif format in ("GeoAMF", "ArcAMF"): import pyamf for class_name in pyamf_classes.items(): pyamf.register_class(*class_name) encoded = pyamf.encode(content, 0).read() out.write(encoded)
async def handler_fn(request):
    """Send the list of download URLs over a freshly accepted websocket."""
    websocket = await request.accept()
    await websocket.send_message(bson.dumps({'urls': download_urls}))
    await websocket.aclose(reason='EOF')
def test_load():
    """Upload a sample PDF to /api/upload using the admin token."""
    # 'with' ensures the handle is closed (the original leaked it).
    with open("./data/original_file", "rb") as pdf_file:
        pdf = pdf_file.read()
    url = base_url + "/api/upload"
    payload = bson.dumps({
        "exam_id": 6,
        "exam_pdf": pdf,
    })
    headers = {
        'Content-Type': "application/json",
        'token': str(admin_token),
        'cache-control': "no-cache",
        'Postman-Token': "4a0a5983-fb41-4353-81b6-aaa6d2a42d44",
    }
    pprint(headers)
    response = requests.request("POST", url, data=payload, headers=headers)
    print(response, response.text)
    if response.status_code == 200:
        print(bson.loads(response.content))
def convert_json2bson(inp_fp, out_fp):
    """Read JSON from *inp_fp* (preserving key order) and write it to
    *out_fp* as BSON."""
    with open(inp_fp, 'rb') as src:
        document = json.load(src, object_pairs_hook=OrderedDict)
    with open(out_fp, 'wb') as dst:
        dst.write(bson.dumps(document))
def test_build_data_collection(self, setup, blank_state, mocker,
                               deckhand_orchestrator):
    """Test that the build data collection from MaaS works."""
    # lshw/lldp XML payloads as bytes, round-tripped through BSON to mimic
    # the wire encoding MaaS returns.
    sample_data = {
        'lshw': '<xml><test>foo</test></xml>'.encode(),
        'lldp': '<xml><test>bar</test></xml>'.encode(),
    }
    bson_data = bson.loads(bson.dumps(sample_data))
    machine = mocker.MagicMock()
    mocker_config = {
        'get_details.return_value': bson_data,
        'hostname': 'foo',
    }
    machine.configure_mock(**mocker_config)
    task = objects.Task(statemgr=blank_state)
    action = ConfigureHardware(task, deckhand_orchestrator, blank_state)
    action.collect_build_data(machine)
    # Expect one build-data record per detail key (lshw + lldp).
    bd = blank_state.get_build_data(node_name='foo')
    assert len(bd) == 2
def upload_motion_to_db(url, name, motion_data, collection, skeleton_name,
                        meta_data, is_processed=False, session=None):
    """Upload a motion clip to the DB behind *url* in multiple parts.

    Dict payloads are BSON-encoded first; the data is bz2-compressed,
    base64-encoded, split into parts, and each part is uploaded via the
    "upload_motion" REST call with its index and the total part count.
    """
    if isinstance(motion_data, dict):  # isinstance, not type() ==
        motion_data = bson.dumps(motion_data)
    motion_data = bz2.compress(motion_data)
    motion_data = base64.b64encode(motion_data).decode()
    parts = split(motion_data)
    for idx, part in enumerate(parts):
        data = {
            "data": part,
            "name": name,
            "skeleton_name": skeleton_name,
            "meta_data": meta_data,
            "collection": collection,
            "part_idx": idx,
            "n_parts": len(parts),
            "is_processed": is_processed,
        }
        if session is not None:
            data.update(session)
        result_text = call_rest_interface(url, "upload_motion", data)
def test_unknown_handler(self):
    """Unknown types (Decimal) are serialized via the on_unknown fallback."""
    value = Decimal("123.45")
    round_tripped = loads(dumps({"decimal": value}, on_unknown=float))
    self.assertEqual(float(value), round_tripped["decimal"])
def main():
    """CLI entry point: configure logging, build the model, write it as BSON
    to --model, and optionally purge zombie Pulumi stack resources."""
    logger.remove()
    logger.add(
        sys.stdout,
        colorize=(not in_automation()),  # no ANSI colors in CI logs
        format="<green>{time:HH:mm:ss.SSS}</green> {level} <lvl>{message}</lvl>"
    )
    parser = argparse.ArgumentParser()
    parser.add_argument('--model', required=True,
                        help='Output file to contain the resulting model.')
    parser.add_argument('--purge-pulumi-stack', action='store_true',
                        help='Purge zombie resources from Pulumi Stack.')
    args = parser.parse_args()
    model, issues = ModelBuilder().build()
    log_issues(issues)
    model_json = bson.dumps(model)
    # TODO: avoid persisting passwords in plain.
    # How: assuming only SSM-stored passwords are supported, postpone
    # dereferencing them to the next stage.
    Path(args.model).write_bytes(model_json)
    if args.purge_pulumi_stack:
        do_purge_pulumi_stack()
def _mate_mutate_indivials(population, individual_ids):
    """Combine/mutate the given individuals' code via a random mate-mutator.

    Loads code/memory/relative-fitness for each individual from redis,
    feeds the BSON-encoded bundle to a randomly chosen mate-mutator (up to
    10 attempts), clamps the resulting code length to the species limits,
    and saves it as a new individual.  Returns the new individual or None.
    """
    data = {}
    for index, individual_id in enumerate(individual_ids):
        index = "%s" % index
        data[index] = {}
        data[index]["code"] = redisconnection.get(
            "individual.%s.code" % individual_id)
        data[index]["memory"] = redisconnection.get(
            "individual.%s.memory" % individual_id)
        data[index]["fitness_relative"] = redisconnection.get(
            "individual.%s.fitness_relative_adult" % individual_id)
        # 'is None' instead of '== None'; any missing field aborts mating.
        if (data[index]["code"] is None
                or data[index]["fitness_relative"] is None
                or data[index]["memory"] is None):
            print("mate fail1")
            return None
    pipe = redisconnection.pipeline()
    pipe.get("species.%s.max_code_length" % population.species_id)
    pipe.get("species.%s.min_code_length" % population.species_id)
    max_code_length, min_code_length = [int(r) for r in pipe.execute()]
    for i in range(0, 10):
        selected_matemutator = evolutionMateMutate.get_random_individual()
        new_code = selected_matemutator.execute(bson.dumps(data))
        new_code = EvolutionaryMethods.limit_code_length(
            new_code, min_code_length, max_code_length)
        if len(new_code) > 5:
            return EvolutionaryMethods._save_new_individual(
                population, selected_matemutator.getIdentifier(), new_code)
        # Degenerate offspring: penalize this mate-mutator and retry.
        selected_matemutator.addFitness(0)
    return None
def octoprinttunnel_http_response_set(ref, data,
                                      expire_secs=TUNNEL_RSP_EXPIRE_SECS):
    """Push a BSON-encoded tunnel response onto its redis list and (re)set
    the key's expiry atomically via a pipeline."""
    key = f"{TUNNEL_PREFIX}.{ref}"
    payload = bson.dumps(data)
    with BREDIS.pipeline() as pipe:
        pipe.lpush(key, payload)
        pipe.expire(key, expire_secs)
        pipe.execute()
def do_POST(self):
    """Mock Duo REST endpoint: verify the request signature, then emit a
    canned BSON response keyed off the 'user'/'factor' args to drive the
    client tests."""
    self.method = 'POST'
    self.args = self._get_args()
    if not self._verify_sig():
        return self._send(401)
    try:
        # A purely numeric 'user' means "reply with that HTTP status code".
        return self._send(int(self.args['user']))
    except:
        ret = {'stat': 'OK'}
        if self.path == '/rest/v1/preauth.bson':
            if self.args['user'] == 'preauth-ok-missing_response':
                pass
            elif self.args['user'] == 'preauth-fail-missing_response':
                ret['stat'] = 'FAIL'
            elif self.args['user'] == 'preauth-bad-stat':
                ret['stat'] = 'FFFFUUUU'
            elif self.args['user'] == 'preauth-fail':
                # NOTE(review): `d` is assigned but never used -- this branch
                # likely intended `ret = {...}`; confirm against the tests.
                d = {'stat': 'FAIL', 'code': 666, 'message': 'you fail'}
            elif self.args['user'] == 'preauth-deny':
                ret['response'] = {'result': 'deny', 'status': 'you suck'}
            elif self.args['user'] == 'preauth-allow':
                ret['response'] = {'result': 'allow', 'status': 'you rock'}
            elif self.args['user'] == 'preauth-allow-bad_response':
                ret['response'] = {'result': 'allow', 'xxx': 'you rock'}
            else:
                # Default: interactive auth prompt listing the factors.
                ret['response'] = {
                    'result': 'auth',
                    'prompt': 'Duo login for %s\n\n' % self.args['user'] + \
                              'Choose or lose:\n\n' + \
                              ' 1. Push 1\n 2. Phone 1\n' + \
                              ' 3. SMS 1 (deny)\n 4. Phone 2 (deny)\n\n' + \
                              'Passcode or option (1-4): ',
                    'factors': {
                        'default': 'push1',
                        '1': 'push1',
                        '2': 'voice1',
                        '3': 'smsrefresh1',
                        '4': 'voice2',
                    }
                }
        elif self.path == '/rest/v1/auth.bson':
            if self.args['factor'] == 'auto':
                txid = 'tx' + self.args['auto'].upper()
                if self.args['async'] == '1':
                    # Async auth returns a transaction id to poll later.
                    ret['response'] = {'txid': txid}
                else:
                    ret['response'] = self._get_tx_response(txid, 0)
            else:
                ret['response'] = {
                    'result': 'deny',
                    'status': 'no %s' % self.args['factor']
                }
        else:
            return self._send(404)
        buf = bson.dumps(ret)
        return self._send(200, buf)
def test_false_value(self):
    """False must round-trip as bool and encode to the exact BSON bytes."""
    encoded = bson.dumps({"key": False})
    decoded = bson.loads(encoded)
    self.assertIsInstance(decoded["key"], bool)
    self.assertFalse(decoded["key"])
    # 11-byte doc: int32 size, 0x08 (bool) + "key\0" + 0x00, terminator.
    self.assertTrue(encoded == b'\x0b\x00\x00\x00\x08key\x00\x00\x00')
def read_from_bytestream(cls, buffer_stream, messageLength):
    """Parse an OP_COMMAND body from *buffer_stream*.

    Reads the database and command-name cstrings, then the remaining
    bytes, which hold the metadata document followed by the command-args
    document.
    """
    messageLength -= sizeof(MongoMsgHeader)
    database = read_cstring(buffer_stream)
    messageLength -= len(database) + 1  # +1 for the NUL terminator
    commandName = read_cstring(buffer_stream)
    messageLength -= len(commandName) + 1
    buffer = buffer_stream.read(messageLength)
    metadata = bson.loads(buffer)
    # The second document starts right after the first; re-dumping the
    # metadata recovers its encoded length so we can slice past it.
    commandReply = bson.loads(buffer[len(bson.dumps(metadata)):])
    return cls(database, commandName, metadata, commandReply)
def test_uuid(self):
    """UUID values survive a dumps/loads round trip as UUID instances."""
    original = UUID('584bcd8f-6d81-485a-bac9-629c14b53847')
    decoded = loads(dumps({"uuid": original}))
    self.assertIsInstance(decoded['uuid'], UUID)
    self.assertTrue(decoded['uuid'] == original)
def send_channel(channel, partner_ids):
    """Notify the client of a newly created channel, with all partner ids
    for a group chat or the single partner otherwise."""
    # NOTE(review): references `self` but takes no self parameter -- this
    # is presumably a closure defined inside a method; confirm in context.
    response = dict(method=u'create', channel=unicode(channel.name))
    if channel.is_group_chat:
        response['partner_ids'] = partner_ids
    else:
        response['partner_id'] = partner_ids[0]
    self.transport.write(bson.dumps(response))
def test_decimal(self):
    """Decimal values come back from a dumps/loads round trip as floats."""
    value = Decimal('1234.45')
    decoded = loads(dumps({"decimal": value}))
    self.assertIsInstance(decoded['decimal'], float)
    self.assertTrue(decoded['decimal'] == float(value))
def send(self, socket):
    """ Sends the message over the socket. """
    try:
        payload = bson.dumps({self.routing_key: self.data})
        BaseMessage._send(self, socket, self.queue_name,
                          self.routing_key, payload)
    except:
        raise ExceptionFormatter.get_full_exception()
def __call__(self, *args):
    """Invoke the remote JSON-RPC method with *args*; return the result or
    raise JSONRPCException on an error response."""
    postdata = dumps({"method": self.__serviceName,
                      "params": args,
                      "id": "jsonrpc"})
    respdata = urllib.urlopen(self.__serviceURL, postdata).read()
    resp = loads(respdata)
    if resp["error"] is not None:  # identity check, not '!= None'
        raise JSONRPCException(resp["error"])
    return resp["result"]
def save(self, out, format): """ """ # # Serialize # if format == 'WKT': if 'wkt' in self.content['crs']: out.write(self.content['crs']['wkt']) else: out.write(_sref_4326().ExportToWkt()) return if format in ('GeoJSON', 'GeoBSON', 'GeoAMF'): content = self.content if 'wkt' in content['crs']: content['crs'] = {'type': 'link', 'properties': {'href': '0.wkt', 'type': 'ogcwkt'}} else: del content['crs'] elif format in ('ArcJSON', 'ArcBSON', 'ArcAMF'): content = reserialize_to_arc(self.content, format == 'ArcAMF') else: raise KnownUnknown('Vector response only saves .geojson, .arcjson, .geobson, .arcbson, .geoamf, .arcamf and .wkt tiles, not "%s"' % format) # # Encode # if format in ('GeoJSON', 'ArcJSON'): indent = self.verbose and 2 or None encoded = JSONEncoder(indent=indent).iterencode(content) float_pat = compile(r'^-?\d+\.\d+$') for atom in encoded: if float_pat.match(atom): piece = ('%%.%if' % self.precision) % float(atom) else: piece = atom out.write(piece.encode('utf8')) elif format in ('GeoBSON', 'ArcBSON'): import bson encoded = bson.dumps(content) out.write(encoded) elif format in ('GeoAMF', 'ArcAMF'): import pyamf for class_name in pyamf_classes.items(): pyamf.register_class(*class_name) encoded = pyamf.encode(content, 0).read() out.write(encoded)
def do_get_channels(self, request):
    """Send the caller their channel list: unread counts, the 20 most
    recent messages per channel, partner join infos, and the involved
    user ids."""
    def get_recent_messages(channel):
        # Latest 20 messages, newest first (scan_index_forward=False).
        return [
            dict(
                message=message.message,
                writer=message.writer,
                type=message.type,
                published_at=message.published_at
            ) for message in DnaMessage.query(
                channel__eq=channel, scan_index_forward=False, limit=20)
        ]

    def get_join_infos(channel):
        # Join info of everyone in the channel except the requesting user.
        return [
            dict(
                user=join_info.user_id,
                joined_at=join_info.joined_at,
                last_read_at=join_info.last_read_at
            ) for join_info in ChannelJoinInfo.by_channel(channel)
            if join_info.user_id != self.user.id
        ]

    channel_dicts = []
    channels = dict(
        (channel.name, channel)
        for channel in Channel.batch_get(
            *[(join_info.channel,) for join_info in self.user.join_infos])
    )
    users = set()
    for join_info in self.user.join_infos:
        channel = channels[join_info.channel]
        recent_messages = get_recent_messages(channel.name)
        # Hide empty one-on-one channels.
        if not recent_messages and not channel.is_group_chat:
            continue
        partner_join_info_dicts = get_join_infos(channel.name)
        channel_dicts.append(dict(
            channel=join_info.channel,
            unread_count=DnaMessage.query(
                channel__eq=channel.name,
                published_at__gt=join_info.last_read_at
            ).count(),
            recent_messages=recent_messages,
            join_infos=partner_join_info_dicts,
            is_group_chat=channel.is_group_chat
        ))
        [users.add(partner_join_info_dict['user'])
         for partner_join_info_dict in partner_join_info_dicts]
        # Record when this channel's data was last pushed to the user.
        last_sent_at = time.time()
        join_info.last_sent_at = last_sent_at
        join_info.save()
    response = dict(
        method=u'get_channels',
        users=list(users),
        channels=channel_dicts
    )
    self.transport.write(bson.dumps(response))
def do_ack(self, request):
    """Broadcast a read-acknowledgement for the given channel/message and
    append it to the log queue."""
    ack = dict(
        sender=self.user.id,
        published_at=request['published_at'],
        method=u'ack',
        channel=request['channel'],
    )
    self.factory.redis_session.publish(request['channel'], bson.dumps(ack))
    self.factory.log_queue.write(QueueMessage(body=json.dumps(ack)))
def _handle_op_query(self, msg_header, responseTo):
    """Handle an OP_QUERY; only the isMaster handshake is supported.

    Replies with a MongoOpReply header followed by the BSON-encoded
    isMaster command reply.
    """
    op_query = MongoOpQuery.read_from_bytestream(self.rfile, msg_header.messageLength)
    print('OP_QUERY:', dict(**op_query._asdict()))
    # Only the initial isMaster handshake query is expected here.
    assert op_query.query['isMaster'] == 1
    op_reply = self._command_reply(b'isMaster')
    print('OP_REPLY:', op_reply)
    # NOTE(review): responseFlags=8 appears to be the AwaitCapable flag --
    # confirm against the wire-protocol constants.
    byteresp = convert_struct_to_bytes(MongoOpReply(responseFlags=8,
                                                    cursorID=0,
                                                    startingFrom=0,
                                                    numberReturned=op_query.numberToReturn))
    byteresp += bson.dumps(op_reply)
    self._send_resp(OP_REPLY, byteresp, responseTo)
def send(self, socket):
    """ Sends the message over the socket. """
    try:
        encoded = bson.dumps(self.store_key_to_data_dict)
        BaseMessage._send(self, socket, encoded, self.expire_seconds)
    except:
        raise ExceptionFormatter.get_full_exception()
def __proc(self, op, args, kwargs):
    """Dispatch *op* to the method of the same name and return the
    BSON-encoded {'res': result} reply ('' when the op is unknown or
    returns a falsy value).
    """
    res = ''
    # getattr with a default: the bare getattr() raises AttributeError for
    # unknown ops, which made the `if func` guard unreachable.
    func = getattr(self, op, None)
    if func:
        ret = func(*args, **kwargs)
        if ret:
            res = ret
    return bson.dumps({'res': res})