def test_message_json_inverse(x):
    """Round-trip property: encode -> JSON -> protobuf -> decode must
    reproduce the original message when the same typedef is used throughout.
    """
    cfg = Config()
    msg_typedef, original = x

    # Wire-format encoding of the generated message
    wire_bytes = length_delim.encode_message(original, cfg, msg_typedef)

    # Convert to JSON, then back to wire format with the derived typedef
    as_json, json_typedef = blackboxprotobuf.protobuf_to_json(
        wire_bytes, config=cfg, message_type=msg_typedef
    )
    rewire_bytes = blackboxprotobuf.protobuf_from_json(
        as_json, config=cfg, message_type=json_typedef
    )

    # Decode the re-encoded bytes using the original typedef
    roundtripped, _ = blackboxprotobuf.decode_message(
        rewire_bytes, config=cfg, message_type=msg_typedef
    )

    assert isinstance(wire_bytes, bytearray)
    assert isinstance(roundtripped, dict)
    assert original == roundtripped
def applyType(self, typedef): """Apply a new typedef to the message. Throws an exception if type is invalid.""" # Convert to protobuf as old type and re-interpret as new type old_message_type = self.message_type json_data = self._text_editor.getText().tostring() protobuf_data = blackboxprotobuf.protobuf_from_json(json_data, old_message_type) new_json, message_type = blackboxprotobuf.protobuf_to_json(str(protobuf_data), typedef) # Should exception out before now if there is an issue # Set the message type and reparse with the new type self.message_type = message_type self._text_editor.setText(str(new_json)) message_hash = self.getMessageHash() self._extender.known_types[message_hash] = message_type
def test_modify_json(x, modify_num):
    """Decode a TestMessage to JSON, overwrite one field with a fixed value of
    the same type, re-encode via blackboxprotobuf, and verify the protobuf
    library parses the modified field back out correctly.
    """
    modify_key = testMessage_typedef[modify_num]["name"]
    message = Test_pb2.TestMessage()
    for key, value in x.items():
        setattr(message, key, value)
    encoded = message.SerializeToString()
    decoded_json, typedef = blackboxprotobuf.protobuf_to_json(
        encoded, testMessage_typedef)
    decoded = json.loads(decoded_json)
    # eliminate any cases where protobuf defaults out a field
    hypothesis.assume(modify_key in decoded)
    # Pick a replacement value matching the decoded field's runtime type
    # (py2 needs an extra unicode branch; str on py2 is bytes-like).
    if isinstance(decoded[modify_key], str):
        mod_func = lambda x: "test"
    elif six.PY2 and isinstance(decoded[modify_key], unicode):
        mod_func = lambda x: six.u("test")
    elif isinstance(decoded[modify_key], bytes):
        mod_func = lambda x: b"test"
    elif isinstance(decoded[modify_key], six.integer_types):
        mod_func = lambda x: 10
    elif isinstance(decoded[modify_key], float):
        mod_func = lambda x: 10
    else:
        # Unknown type: record it for hypothesis and fail the example
        hypothesis.note("Failed to modify key: %s (%r)"
                        % (modify_key, type(decoded[modify_key])))
        assert False
    # Apply the same modification to both the JSON dict and the expected dict
    decoded[modify_key] = mod_func(decoded[modify_key])
    x[modify_key] = mod_func(x[modify_key])
    encoded = blackboxprotobuf.protobuf_from_json(json.dumps(decoded),
                                                  testMessage_typedef)
    message = Test_pb2.TestMessage()
    message.ParseFromString(encoded)
    for key in decoded.keys():
        hypothesis.note("Message value:")
        hypothesis.note(type(getattr(message, key)))
        hypothesis.note("Orig value:")
        hypothesis.note((x[key]))
        if key == "testBytes":
            # JSON cannot carry raw bytes; they round-trip as latin1 text,
            # so normalize the expected value to bytes before comparing.
            x[key] = six.ensure_binary(x[key], encoding="latin1")
        assert getattr(message, key) == x[key]
def _check_type(self, typename):
    """Return True if the named known type successfully decodes the
    current payload bytes.

    Verdicts are memoized in self._working_types / self._rejected_types
    so each candidate type is only tried against the data once.
    """
    # Serve cached verdicts first
    if typename in self._rejected_types:
        return False
    if typename in self._working_types:
        return True

    # if we don't have data yet, just quit early
    if not self._data:
        return False

    known = default_config.known_types
    if typename not in known:
        return False

    try:
        blackboxprotobuf.protobuf_to_json(self._data, known[typename])
    except BlackboxProtobufException:
        # Decode failed: log the traceback and remember the rejection
        self._callbacks.printError(traceback.format_exc())
        self._rejected_types.add(typename)
        return False

    self._working_types.add(typename)
    return True
def test_decode_json(x):
    """Serialize a TestMessage with protobuf, decode it to JSON with
    blackboxprotobuf, and verify every decoded field matches the input.
    """
    # Test with JSON payload
    message = Test_pb2.TestMessage()
    for key, value in x.items():
        setattr(message, key, value)
    encoded = message.SerializeToString()
    decoded_json, typedef_json = blackboxprotobuf.protobuf_to_json(
        encoded, testMessage_typedef)
    hypothesis.note("Encoded JSON:")
    hypothesis.note(decoded_json)
    decoded = json.loads(decoded_json)
    hypothesis.note("Original value:")
    hypothesis.note(x)
    # Fixed typo in the hypothesis note label ("valuec" -> "value")
    hypothesis.note("Decoded value:")
    hypothesis.note(decoded)
    for key in decoded.keys():
        if key == "testBytes":
            # JSON cannot carry raw bytes; they round-trip as latin1 text,
            # so normalize back to bytes before comparing.
            decoded[key] = six.ensure_binary(decoded[key], encoding="latin1")
        assert x[key] == decoded[key]
def applyType(self, typedef): """Apply a new typedef to the message. Throws an exception if type is invalid.""" # store a reference for later mutation? self._source_typedef = typedef # Convert to protobuf as old type and re-interpret as new type old_message_type = self.message_type json_data = self._text_editor.getText().tostring() protobuf_data = blackboxprotobuf.protobuf_from_json(json_data, old_message_type) new_json, message_type = blackboxprotobuf.protobuf_to_json( str(protobuf_data), typedef ) # Should exception out before now if there is an issue self.message_type = message_type # if the json data was modified, then re-check our types if json_data != self._last_set_json: self._filtered_message_model.set_new_data(protobuf_data) self._last_set_json = str(new_json) self._text_editor.setText(str(new_json))
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Read a binary protobuf message from stdin and print it as JSON along
# with the type definition blackboxprotobuf inferred for it.
import json
import sys

try:
    import blackboxprotobuf
except ImportError:
    # blackboxprotobuf (forked version) is not installed.
    # Use sys.exit instead of the site-injected exit(): the latter is only
    # guaranteed to exist in interactive sessions.
    sys.exit(-1)

# Binary-safe read: protobuf wire data must not go through the text wrapper
data = sys.stdin.buffer.read()
(message, typedef) = blackboxprotobuf.protobuf_to_json(data)
print("Message:")
print(message)
print("")
print("Type definition:")
print(json.dumps(typedef, indent=2))
def setMessage(self, content, is_request, retry=True):
    """Get the data from the request/response and parse into JSON.
    sets self.message_type

    If parsing fails with a previously saved type and ``retry`` is True,
    the saved type is discarded and the call is retried once with no type.
    """
    # Save original content
    self._original_content = content
    if is_request:
        self._content_info = self._helpers.analyzeRequest(
            self._controller.getHttpService(), content)
    else:
        self._content_info = self._helpers.analyzeResponse(content)
    self._is_request = is_request
    self._request = None
    self._request_content_info = None
    # For responses, also grab the originating request so user hooks can
    # inspect it.
    if not is_request:
        self._request = self._controller.getRequest()
        self._request_content_info = self._helpers.analyzeRequest(
            self._controller.getHttpService(), self._request)
    message_hash = self.getMessageHash()
    # Try to find saved messsage type
    self.message_type = None
    if message_hash in self._extender.known_types:
        self.message_type = self._extender.known_types[message_hash]
    try:
        protobuf_data = None
        # User hook may extract the protobuf payload itself (e.g. from a
        # custom wrapper); fall back to the raw body otherwise.
        if 'get_protobuf_data' in dir(user_funcs):
            protobuf_data = user_funcs.get_protobuf_data(
                content, is_request, self._content_info, self._helpers,
                self._request, self._request_content_info)
        if protobuf_data is None:
            protobuf_data = content[self._content_info.getBodyOffset(
            ):].tostring()
        protobuf_data = self.decodePayload(protobuf_data)
        json_data, self.message_type = blackboxprotobuf.protobuf_to_json(
            protobuf_data, self.message_type)
        # Save the message type
        self._extender.known_types[message_hash] = self.message_type
        self._original_json = json_data
        self._text_editor.setText(json_data)
        success = True
    except Exception as exc:
        success = False
        self._callbacks.printError(traceback.format_exc())
    # Bring out of exception handler to avoid nesting handlers
    if not success:
        if retry:
            # Clear existing type info and retry
            prev_type = self.message_type
            self.message_type = None
            if message_hash in self._extender.known_types:
                del self._extender.known_types[message_hash]
            try:
                # retry=False guarantees at most one level of recursion
                self.setMessage(content, is_request, False)
            except Exception as exc:
                # If it still won't parse, restore the types
                self.message_type = prev_type
                self._extender.known_types[message_hash] = prev_type
        else:
            self._text_editor.setText("Error parsing protobuf")
def setMessage(self, content, is_request, retry=True):
    """Get the data from the request/response and parse into JSON.
    sets self.message_type

    NOTE(review): ``retry`` is not referenced in this visible body; the
    recursive call below always passes False — confirm against callers.
    """
    # Save original content
    self._original_content = content
    if is_request:
        self._content_info = self._helpers.analyzeRequest(
            self._controller.getHttpService(), content
        )
    else:
        self._content_info = self._helpers.analyzeResponse(content)
    self._is_request = is_request
    self._request = None
    self._request_content_info = None
    # For responses, also grab the originating request for user hooks
    if not is_request:
        self._request = self._controller.getRequest()
        self._request_content_info = self._helpers.analyzeRequest(
            self._controller.getHttpService(), self._request
        )

    # how we remember which message type correlates to which endpoint
    self._message_hash = self.getMessageHash()

    # Try to find saved messsage type
    self.message_type = None
    self.message_type_name = None
    if self._message_hash in self._extension.saved_types:
        typename = self._extension.saved_types[self._message_hash]
        if typename in default_config.known_types:
            self.message_type_name = typename
            self.message_type = default_config.known_types[typename]
        else:
            # Saved name no longer resolves to a known type; drop it
            del self._extension.saved_types[self._message_hash]

    try:
        protobuf_data = None
        # User hook may extract the protobuf payload itself; fall back to
        # the raw body otherwise.
        if "get_protobuf_data" in dir(user_funcs):
            protobuf_data = user_funcs.get_protobuf_data(
                content,
                is_request,
                self._content_info,
                self._helpers,
                self._request,
                self._request_content_info,
            )
        if protobuf_data is None:
            protobuf_data = content[self._content_info.getBodyOffset() :].tostring()

        protobuf_data = self.decodePayload(protobuf_data)

        # source_typedef will be the original, updatable version of the dict
        # TODO fix this hack
        self._original_data = protobuf_data
        self._filtered_message_model.set_new_data(protobuf_data)
        self._source_typedef = self.message_type
        json_data, self.message_type = blackboxprotobuf.protobuf_to_json(
            protobuf_data, self.message_type
        )

        self._original_json = json_data
        self._original_typedef = self.message_type
        self._last_set_json = str(json_data)
        self._text_editor.setText(json_data)
        success = True
    except Exception as exc:
        success = False
        self._callbacks.printError(
            "Got error decoding protobuf binary: " + traceback.format_exc()
        )

    # Bring out of exception handler to avoid nesting handlers
    if not success:
        if self._message_hash in self._extension.saved_types:
            # Clear the saved type and retry once without it; the deletion
            # above ensures the recursive call cannot loop.
            del self._extension.saved_types[self._message_hash]
            self.setMessage(content, is_request, False)
        self._text_editor.setText("Error decoding protobuf")
        if self.message_type_name:
            self.forceSelectType(self.message_type_name)
#! /usr/bin/python
# Decode a protobuf file given on the command line: pickle the decoded
# message to decoded.pickle and write a JSON rendering next to the input.
import sys

sys.path.insert(1, './blackboxprotobuf')
sys.path.insert(1, '../../modules/protobuf-3.11.4/python')

import blackboxprotobuf
import pickle

if len(sys.argv) != 2:
    # print() form works on both Python 2 and 3 (the old
    # `print "Usage:", ...` statement is Python-2-only syntax)
    print("Usage: " + sys.argv[0] + " PROTOBUF FILE TO DECODE")
    sys.exit(-1)

# Use context managers so file handles are always closed
with open(sys.argv[1], "rb") as f:
    data = f.read()

message, typedef = blackboxprotobuf.decode_message(data)
with open('decoded.pickle', 'wb') as handle:
    pickle.dump(message, handle, protocol=pickle.HIGHEST_PROTOCOL)

# Renamed from `json` to avoid shadowing the common module name
json_str, typedef = blackboxprotobuf.protobuf_to_json(data)
print(json_str)
print(type(json_str))
with open(sys.argv[1] + '.json', 'w') as f2:
    f2.write(json_str)
#!/usr/bin/env python
# Take a protobuf binary from stdin and decode it to JSON, printing the
# JSON body and the inferred type definition.
import sys

sys.path.insert(0, "../")

import blackboxprotobuf as bbp

typedef = {}

# Read raw bytes: protobuf wire data is binary, so bypass the text wrapper
# on Python 3 (plain sys.stdin.read() decodes as text and can corrupt the
# payload). Python 2's stdin has no .buffer, so fall back there.
if hasattr(sys.stdin, "buffer"):
    protobuf = sys.stdin.buffer.read()
else:
    protobuf = sys.stdin.read()

json, typedef = bbp.protobuf_to_json(protobuf, typedef)
print(json)
print(typedef)
def resolve_meeting_code(meetcode):
    """Resolve a Google Meet meeting code via the private MeetingSpaceService
    RPC endpoint.

    Builds a protobuf request body from the meeting code, POSTs it with the
    credentials taken from the environment, and parses the protobuf response.
    Returns a tuple:
    (spacecode, meetcode, meeturl, gmeettoken, lookupcode, organization,
     maxparticipants).
    Raises RequestError for auth/API failures; calls exit() with codes 7-9
    for the "not found" / "gone" / "cannot resolve" cases.
    """
    # print(repr(get_requestdata_template(code)[1].format(code)))
    # RPC endpoint URL
    rl = "https://meet.google.com/$rpc/google.rtc.meetings.v1.MeetingSpaceService/ResolveMeetingSpace"
    # Request headers; auth material comes from the environment
    rh = {
        "content-type": "application/x-protobuf",
        "cookie": getenv('COOKIE'),
        "authorization": generate_sapisidhash(),
        "x-goog-api-key": getenv('GAPIKEY'),
        "x-goog-authuser": getenv('COOKIE_AUTHUSER'),
        "x-goog-encode-response-if-executable": "base64",
        "x-origin": "https://meet.google.com"
    }
    # Request body: field 1 = meeting code, field 6 = 1 (flag; exact
    # semantics unknown — inferred field numbers, no public schema)
    rd = protobuf_from_json(
        dumpsJSON({
            '1': meetcode,
            '6': 1
        }),
        {
            '1': {
                'type': 'bytes',
                'name': ''
            },
            '6': {
                'type': 'int',
                'name': ''
            }
        }
    )
    r = post(
        rl,
        headers=rh,
        data=rd
    )
    # Map known error responses to specific exceptions / exit codes
    if r.status_code != 200:
        # print(repr(r.status_code), repr(r.text))
        if r.status_code == 401 and "Request had invalid authentication credentials." in r.text:
            raise RequestError("Authentication failed.", rl, rh, rd, r)
        if r.status_code == 403 and "The request is missing a valid API key." in r.text:
            raise RequestError("API key invalid.", rl, rh, rd, r)
        if r.status_code == 400 and "Request contains an invalid argument." in r.text:
            raise RequestError("Invalid argument during request.", rl, rh, rd, r)
        if r.status_code == 400 and "The conference is gone" in r.text:
            print(SERIALIZATION_DELIM.join(('result', "Meeting space ended.",)))
            exit(8)
        if r.status_code == 404 and "Requested meeting space does not exist." in r.text:
            print(SERIALIZATION_DELIM.join(('result', "No such meeting code.",)))
            exit(7)
        if r.status_code == 403 and "The requester cannot resolve this meeting" in r.text:
            exit(9)
        raise RequestError("Unknown error.", rl, rh, rd, r)
    # Response body is base64-encoded protobuf (see the
    # x-goog-encode-response-if-executable header above)
    p = protobuf_to_json(b64decode(r.text))
    # Optional debug mirror of raw + decoded response
    if getenv("PROTOBUF_DEBUG_LOG_ENDPOINT") is not None:
        post(
            getenv("PROTOBUF_DEBUG_LOG_ENDPOINT"),
            json={"content": "```\n{}\n```\n```json\n{}\n```".format(r.text, p[0])}
        )
    p = loadsJSON(p[0])
    spacecode = p['1']
    # Field 2 is the meeting code when it decodes as a string; otherwise
    # recover the code from the meeting URL in field 3
    meetcode = p['2'] if type(p['2']) is str else findall(r"https?://meet\.google\.com/(?:_meet/)?(\w{3}-\w{4}-\w{3})(?:\?.*)?$", p['3'])[0]
    meeturl = p['3']
    lookupcode = p.get('7', None)
    gmeettoken = r.headers.get('x-goog-meeting-token', None)
    # Field 6 holds optional details (organization, participant cap)
    details = p.get('6')
    organization = None
    maxparticipants = None
    if details:
        organization = details.get('4')
        maxparticipants = details.get('6')
    return (spacecode, meetcode, meeturl, gmeettoken, lookupcode, organization, maxparticipants)
def test_anon_json_decode(x):
    """Round-trip a message through JSON with an *anonymous* decode (no
    typedef supplied to protobuf_to_json) and verify the result is
    semantically equal to the original, allowing for the type ambiguities
    inherent in blind decoding (bytes vs string vs nested message).
    """
    config = Config()
    typedef, message = x
    encoded = blackboxprotobuf.encode_message(message, config=config, message_type=typedef)
    # Decode without a typedef: types must be inferred from the wire data
    decoded_json, typedef_out = blackboxprotobuf.protobuf_to_json(
        encoded, config=config)
    encoded_json = blackboxprotobuf.protobuf_from_json(
        decoded_json, config=config, message_type=typedef_out)
    decoded, typedef_out = blackboxprotobuf.decode_message(encoded_json, config=config)
    note("Original message: %r" % message)
    note("Decoded JSON: %r" % decoded_json)
    note("Decoded message: %r" % decoded)
    note("Original typedef: %r" % typedef)
    note("Decoded typedef: %r" % typedef_out)

    def check_message(orig, orig_typedef, new, new_typedef):
        # Recursively compare two decoded messages field by field,
        # coercing types where anonymous decoding was ambiguous.
        for field_number in set(orig.keys()) | set(new.keys()):
            # verify all fields are there
            assert field_number in orig
            assert field_number in orig_typedef
            assert field_number in new
            assert field_number in new_typedef

            orig_values = orig[field_number]
            new_values = new[field_number]
            orig_type = orig_typedef[field_number]["type"]
            new_type = new_typedef[field_number]["type"]

            note("Parsing field# %s" % field_number)
            note("orig_values: %r" % orig_values)
            note("new_values: %r" % new_values)
            note("orig_type: %s" % orig_type)
            note("new_type: %s" % new_type)
            # Fields might be lists. Just convert everything to a list
            if not isinstance(orig_values, list):
                orig_values = [orig_values]
                # If orig wasn't repeated, new must not be either
                assert not isinstance(new_values, list)
                new_values = [new_values]
            assert isinstance(orig_values, list)
            assert isinstance(new_values, list)

            # if the types don't match, then try to convert them
            if new_type == "message" and orig_type in ["bytes", "string"]:
                # if the type is a message, we want to convert the orig type
                # to a message
                # this isn't ideal, we'll be using the unintended type, but
                # best way to compare. Re-encoding a message to binary might
                # not keep the field order
                new_field_typedef = new_typedef[field_number][
                    "message_typedef"]
                for i, orig_value in enumerate(orig_values):
                    if orig_type == "bytes":
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_bytes(orig_value),
                            config,
                            new_field_typedef,
                        )
                    else:
                        # string value
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_string(orig_value),
                            config,
                            new_field_typedef,
                        )
                orig_typedef[field_number][
                    "message_typedef"] = orig_field_typedef
                orig_type = "message"
            if new_type == "string" and orig_type == "bytes":
                # our bytes were accidently valid string
                new_type = "bytes"
                # Re-encode the decoded strings back down to bytes so the
                # comparison below is bytes-vs-bytes
                for i, new_value in enumerate(new_values):
                    new_values[i], _ = length_delim.decode_bytes(
                        length_delim.encode_string(new_value), 0)
                note("New values: %r" % new_values)
            # sort the lists with special handling for dicts
            orig_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())
            new_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())
            for orig_value, new_value in zip(orig_values, new_values):
                if orig_type == "message":
                    # Recurse into nested messages
                    check_message(
                        orig_value,
                        orig_typedef[field_number]["message_typedef"],
                        new_value,
                        new_typedef[field_number]["message_typedef"],
                    )
                else:
                    assert orig_value == new_value

    check_message(message, typedef, decoded, typedef_out)