def validateMessage(self):
    """Callback for the validate button.

    Tries to encode the editor's current JSON with the active type
    definition; on success the JSON is kept as the last-known-good
    message, on failure the error is shown to the user and logged.
    """
    try:
        json_text = self._text_editor.getText().tostring()
        blackboxprotobuf.protobuf_from_json(json_text, self.message_type)
        # Encoding succeeded -- remember this JSON as the saved message.
        self._original_json = json_text
    except Exception as ex:
        JOptionPane.showMessageDialog(self._component, str(ex))
        self._callbacks.printError(traceback.format_exc())
def getMessage(self):
    """Transform the JSON format back to the binary protobuf message.

    Returns the rebuilt HTTP message bytes, or the unmodified original
    content when there is nothing to re-encode or encoding fails.
    """
    try:
        # No typedef, or the editor was never touched: return the
        # original bytes untouched.
        if self.message_type is None or not self.isModified():
            return self._original_content
        json_data = self._text_editor.getText().tostring()
        protobuf_data = blackboxprotobuf.protobuf_from_json(
            json_data, self.message_type)
        # NOTE(review): encodePayload presumably re-applies whatever
        # transport encoding was stripped when decoding -- confirm.
        protobuf_data = self.encodePayload(protobuf_data)
        # Optional user hook: if it returns a value, that value replaces
        # the whole outgoing message.
        if 'set_protobuf_data' in dir(user_funcs):
            result = user_funcs.set_protobuf_data(
                protobuf_data, self._original_content,
                self._is_request, self._content_info,
                self._helpers, self._request,
                self._request_content_info)
            if result is not None:
                return result

        # Rebuild the HTTP message with the original headers and the
        # newly encoded protobuf body.
        headers = self._content_info.getHeaders()
        return self._helpers.buildHttpMessage(headers, str(protobuf_data))
    except Exception as exc:
        self._callbacks.printError(traceback.format_exc())
        JOptionPane.showMessageDialog(
            self._component, "Error encoding protobuf: " + str(exc))
        # Resets state
        return self._original_content
def test_encode_json(x):
    """Round-trip: JSON input -> blackboxprotobuf wire bytes -> reference
    protobuf parser, asserting every field survives intact."""
    # Test with JSON payload
    if "testBytes" in x:
        x["testBytes"] = x["testBytes"].decode("latin1")

    payload = json.dumps(x)
    hypothesis.note("JSON Str Input:")
    hypothesis.note(payload)
    hypothesis.note(json.loads(payload))

    encoded = blackboxprotobuf.protobuf_from_json(payload, testMessage_typedef)

    hypothesis.note("BBP decoding:")
    test_decode, _ = blackboxprotobuf.decode_message(encoded, testMessage_typedef)
    hypothesis.note(test_decode)

    reference = Test_pb2.TestMessage()
    reference.ParseFromString(encoded)
    hypothesis.note("Message:")
    hypothesis.note(reference)

    for field in x.keys():
        hypothesis.note("Message value")
        hypothesis.note(type(getattr(reference, field)))
        hypothesis.note("Original value")
        hypothesis.note(type(x[field]))
        if field == "testBytes":
            # Compare as raw bytes; the reference parser keeps bytes fields binary.
            x[field] = six.ensure_binary(x[field], encoding="latin1")
        assert getattr(reference, field) == x[field]
def test_message_json_inverse(x):
    """protobuf_to_json and protobuf_from_json must be mutual inverses
    for a message encoded under a known typedef."""
    cfg = Config()
    typedef, message = x

    wire_bytes = length_delim.encode_message(message, cfg, typedef)
    as_json, json_typedef = blackboxprotobuf.protobuf_to_json(
        wire_bytes, config=cfg, message_type=typedef)
    rewire = blackboxprotobuf.protobuf_from_json(
        as_json, config=cfg, message_type=json_typedef)
    round_trip, _ = blackboxprotobuf.decode_message(
        rewire, config=cfg, message_type=typedef)

    assert isinstance(wire_bytes, bytearray)
    assert isinstance(round_trip, dict)
    assert message == round_trip
def applyType(self, typedef):
    """Apply a new typedef to the message. Throws an exception if type
    is invalid."""
    # Encode with the previous typedef, then re-decode with the candidate
    # one; if either step raises, we abort before touching any state.
    previous_type = self.message_type
    current_json = self._text_editor.getText().tostring()
    encoded = blackboxprotobuf.protobuf_from_json(current_json, previous_type)
    new_json, message_type = blackboxprotobuf.protobuf_to_json(
        str(encoded), typedef)

    # Both conversions succeeded: commit the new type, refresh the
    # editor, and remember this type for the message's hash.
    self.message_type = message_type
    self._text_editor.setText(str(new_json))
    message_hash = self.getMessageHash()
    self._extender.known_types[message_hash] = message_type
def test_modify_json(x, modify_num):
    """Encode a message, modify one field through the JSON round-trip,
    and check the reference parser sees the modified value."""
    modify_key = testMessage_typedef[modify_num]["name"]
    message = Test_pb2.TestMessage()
    for key, value in x.items():
        setattr(message, key, value)
    encoded = message.SerializeToString()
    decoded_json, typedef = blackboxprotobuf.protobuf_to_json(
        encoded, testMessage_typedef)
    decoded = json.loads(decoded_json)
    # eliminate any cases where protobuf defaults out a field
    hypothesis.assume(modify_key in decoded)

    # Pick a replacement value matching the decoded field's kind.
    # On Python 2 the first branch catches byte strings (str is bytes),
    # the second catches unicode; on Python 3 only the str and bytes
    # branches fire.
    if isinstance(decoded[modify_key], str):
        mod_func = lambda x: "test"
    elif six.PY2 and isinstance(decoded[modify_key], unicode):
        mod_func = lambda x: six.u("test")
    elif isinstance(decoded[modify_key], bytes):
        mod_func = lambda x: b"test"
    elif isinstance(decoded[modify_key], six.integer_types):
        mod_func = lambda x: 10
    elif isinstance(decoded[modify_key], float):
        mod_func = lambda x: 10
    else:
        hypothesis.note("Failed to modify key: %s (%r)"
                        % (modify_key, type(decoded[modify_key])))
        assert False

    # Apply the same modification to both the JSON dict we re-encode and
    # the expected-values dict, so the final comparison stays in lockstep.
    decoded[modify_key] = mod_func(decoded[modify_key])
    x[modify_key] = mod_func(x[modify_key])

    encoded = blackboxprotobuf.protobuf_from_json(json.dumps(decoded),
                                                  testMessage_typedef)
    message = Test_pb2.TestMessage()
    message.ParseFromString(encoded)
    for key in decoded.keys():
        hypothesis.note("Message value:")
        hypothesis.note(type(getattr(message, key)))
        hypothesis.note("Orig value:")
        hypothesis.note((x[key]))
        if key == "testBytes":
            # Compare as bytes; the reference parser keeps bytes fields binary.
            x[key] = six.ensure_binary(x[key], encoding="latin1")
        assert getattr(message, key) == x[key]
def applyType(self, typedef): """Apply a new typedef to the message. Throws an exception if type is invalid.""" # store a reference for later mutation? self._source_typedef = typedef # Convert to protobuf as old type and re-interpret as new type old_message_type = self.message_type json_data = self._text_editor.getText().tostring() protobuf_data = blackboxprotobuf.protobuf_from_json(json_data, old_message_type) new_json, message_type = blackboxprotobuf.protobuf_to_json( str(protobuf_data), typedef ) # Should exception out before now if there is an issue self.message_type = message_type # if the json data was modified, then re-check our types if json_data != self._last_set_json: self._filtered_message_model.set_new_data(protobuf_data) self._last_set_json = str(new_json) self._text_editor.setText(str(new_json))
def resolve_meeting_code(meetcode):
    """Resolve a Google Meet meeting code via the MeetingSpaceService RPC.

    Returns a tuple (spacecode, meetcode, meeturl, gmeettoken, lookupcode,
    organization, maxparticipants). Raises RequestError on auth/API
    failures; calls exit() for terminal user-facing conditions.
    """
    # print(repr(get_requestdata_template(code)[1].format(code)))
    rl = "https://meet.google.com/$rpc/google.rtc.meetings.v1.MeetingSpaceService/ResolveMeetingSpace"
    rh = {
        "content-type": "application/x-protobuf",
        "cookie": getenv('COOKIE'),
        "authorization": generate_sapisidhash(),
        "x-goog-api-key": getenv('GAPIKEY'),
        "x-goog-authuser": getenv('COOKIE_AUTHUSER'),
        "x-goog-encode-response-if-executable": "base64",
        "x-origin": "https://meet.google.com"
    }
    # Request body: field 1 = meeting code (bytes), field 6 = int flag.
    rd = protobuf_from_json(
        dumpsJSON({
            '1': meetcode,
            '6': 1
        }),
        {
            '1': {
                'type': 'bytes',
                'name': ''
            },
            '6': {
                'type': 'int',
                'name': ''
            }
        }
    )
    r = post(
        rl,
        headers=rh,
        data=rd
    )
    if r.status_code != 200:
        # print(repr(r.status_code), repr(r.text))
        # Match known failure modes on status code + error-message text.
        if r.status_code == 401 and "Request had invalid authentication credentials." in r.text:
            raise RequestError("Authentication failed.", rl, rh, rd, r)
        if r.status_code == 403 and "The request is missing a valid API key." in r.text:
            raise RequestError("API key invalid.", rl, rh, rd, r)
        if r.status_code == 400 and "Request contains an invalid argument." in r.text:
            raise RequestError("Invalid argument during request.", rl, rh, rd, r)
        if r.status_code == 400 and "The conference is gone" in r.text:
            print(SERIALIZATION_DELIM.join(('result', "Meeting space ended.",)))
            exit(8)
        if r.status_code == 404 and "Requested meeting space does not exist." in r.text:
            print(SERIALIZATION_DELIM.join(('result', "No such meeting code.",)))
            exit(7)
        if r.status_code == 403 and "The requester cannot resolve this meeting" in r.text:
            exit(9)
        raise RequestError("Unknown error.", rl, rh, rd, r)
    # Response body is base64 (x-goog-encode-response-if-executable above).
    p = protobuf_to_json(b64decode(r.text))
    if getenv("PROTOBUF_DEBUG_LOG_ENDPOINT") is not None:
        # Optional debug webhook: mirror raw + decoded response.
        post(
            getenv("PROTOBUF_DEBUG_LOG_ENDPOINT"),
            json={"content": "```\n{}\n```\n```json\n{}\n```".format(r.text, p[0])}
        )
    p = loadsJSON(p[0])
    spacecode = p['1']
    # Field 2 may be absent/non-string; fall back to extracting the
    # xxx-xxxx-xxx code from the meeting URL in field 3.
    meetcode = p['2'] if type(p['2']) is str else findall(r"https?://meet\.google\.com/(?:_meet/)?(\w{3}-\w{4}-\w{3})(?:\?.*)?$", p['3'])[0]
    meeturl = p['3']
    lookupcode = p.get('7', None)
    gmeettoken = r.headers.get('x-goog-meeting-token', None)
    # NOTE(review): field 6 looks like org/limit details; subfield meanings
    # inferred from usage only -- confirm.
    details = p.get('6')
    organization = None
    maxparticipants = None
    if details:
        organization = details.get('4')
        maxparticipants = details.get('6')
    return (spacecode, meetcode, meeturl, gmeettoken, lookupcode, organization, maxparticipants)
def test_anon_json_decode(x):
    """Encode a typed message, decode it anonymously (no typedef) through
    JSON, and verify the anonymous decode matches the original message."""
    config = Config()
    typedef, message = x
    encoded = blackboxprotobuf.encode_message(message, config=config, message_type=typedef)
    decoded_json, typedef_out = blackboxprotobuf.protobuf_to_json(
        encoded, config=config)
    encoded_json = blackboxprotobuf.protobuf_from_json(
        decoded_json, config=config, message_type=typedef_out)
    decoded, typedef_out = blackboxprotobuf.decode_message(encoded_json, config=config)
    note("Original message: %r" % message)
    note("Decoded JSON: %r" % decoded_json)
    note("Decoded message: %r" % decoded)
    note("Original typedef: %r" % typedef)
    note("Decoded typedef: %r" % typedef_out)

    def check_message(orig, orig_typedef, new, new_typedef):
        """Recursively compare two decoded messages field-by-field,
        reconciling the type differences an anonymous decode can produce."""
        for field_number in set(orig.keys()) | set(new.keys()):
            # verify all fields are there
            assert field_number in orig
            assert field_number in orig_typedef
            assert field_number in new
            assert field_number in new_typedef

            orig_values = orig[field_number]
            new_values = new[field_number]
            orig_type = orig_typedef[field_number]["type"]
            new_type = new_typedef[field_number]["type"]

            note("Parsing field# %s" % field_number)
            note("orig_values: %r" % orig_values)
            note("new_values: %r" % new_values)
            note("orig_type: %s" % orig_type)
            note("new_type: %s" % new_type)

            # Fields might be lists. Just convert everything to a list
            if not isinstance(orig_values, list):
                orig_values = [orig_values]
                # If orig was scalar, new must be scalar too.
                assert not isinstance(new_values, list)
                new_values = [new_values]
            assert isinstance(orig_values, list)
            assert isinstance(new_values, list)

            # if the types don't match, then try to convert them
            if new_type == "message" and orig_type in ["bytes", "string"]:
                # if the type is a message, we want to convert the orig type to a message
                # this isn't ideal, we'll be using the unintended type, but
                # best way to compare. Re-encoding a message to binary might
                # not keep the field order
                new_field_typedef = new_typedef[field_number][
                    "message_typedef"]
                for i, orig_value in enumerate(orig_values):
                    if orig_type == "bytes":
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_bytes(orig_value),
                            config,
                            new_field_typedef,
                        )
                    else:
                        # string value
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_string(orig_value),
                            config,
                            new_field_typedef,
                        )
                orig_typedef[field_number][
                    "message_typedef"] = orig_field_typedef
                orig_type = "message"

            if new_type == "string" and orig_type == "bytes":
                # our bytes were accidently valid string
                new_type = "bytes"
                for i, new_value in enumerate(new_values):
                    new_values[i], _ = length_delim.decode_bytes(
                        length_delim.encode_string(new_value), 0)
                note("New values: %r" % new_values)

            # sort the lists with special handling for dicts
            orig_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())
            new_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())

            # Compare pairwise; recurse into nested messages.
            for orig_value, new_value in zip(orig_values, new_values):
                if orig_type == "message":
                    check_message(
                        orig_value,
                        orig_typedef[field_number]["message_typedef"],
                        new_value,
                        new_typedef[field_number]["message_typedef"],
                    )
                else:
                    assert orig_value == new_value

    check_message(message, typedef, decoded, typedef_out)