def import_file_set(file_set, modules=None, _open=open):
  """Import FileSet into module space.

  Args:
    file_set: If string, open file and read serialized FileSet.  Otherwise,
      a FileSet instance to import definitions from.
    modules: Dictionary of modules to update.  Modules and their parents that
      do not exist will be created.  If an existing module is found that
      matches file_descriptor.package, that module is updated with the
      FileDescriptor contents.
    _open: Used for dependency injection during tests.
  """
  if isinstance(file_set, basestring):
    encoded_file = _open(file_set, 'rb')
    try:
      encoded_file_set = encoded_file.read()
    finally:
      encoded_file.close()

    file_set = protobuf.decode_message(descriptor.FileSet, encoded_file_set)

  for file_descriptor in file_set.files:
    # Do not reload built-in protorpc classes.
    if not file_descriptor.package.startswith('protorpc.'):
      import_file(file_descriptor, modules=modules)
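# Usage sketch for import_file_set (hedged): 'api.fileset' is a hypothetical
# path to a serialized descriptor.FileSet, and sys.modules is one valid choice
# for the modules dictionary, updating the live module space in place.
import sys

import_file_set('api.fileset', modules=sys.modules)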
def __search_info(cls, request, info_class, model_to_message,
                  customize_query=None):
  """Search over an Info subclass.

  Since all search request classes are very similar, it's possible to
  generalize how to do searches over them.

  Args:
    request: Search request received from client.
    info_class: The model.Info subclass to search.
    model_to_message: Function (model) -> message that transforms an instance
      of info_class into the appropriate messages.Message subclass.
    customize_query: Function (request, query) -> None that adds additional
      filters to the Datastore query based on specifics of that search
      message.

  Returns:
    Tuple (results, continuation):
      results: A list of messages satisfying the parameters of the request.
        None if there are no results.
      continuation: Continuation string for the response if there are more
        results available.  None if there are no more results available.
  """
  # TODO(rafek): fetch_size from this request should take priority
  # over what is stored in continuation.
  if request.continuation:
    encoded_search, continuation = request.continuation.split(':', 1)
    decoded_search = base64.urlsafe_b64decode(encoded_search.encode('utf-8'))
    request = protobuf.decode_message(type(request), decoded_search)
  else:
    continuation = None
    encoded_search = unicode(base64.urlsafe_b64encode(
        protobuf.encode_message(request)))

  name_prefix = request.name_prefix
  query = info_class.search(name_prefix)
  query.order('name')
  if customize_query:
    customize_query(request, query)

  if continuation:
    # TODO(rafek): Pure query cursors are not safe for models with
    # query restrictions.  Would technically need to be encrypted.
    query.with_cursor(continuation)

  fetch_size = request.fetch_size
  model_instances = query.fetch(fetch_size)

  results = None
  continuation = None
  if model_instances:
    results = [model_to_message(i) for i in model_instances]
    if len(model_instances) == fetch_size:
      cursor = query.cursor()
      continuation = u'%s:%s' % (encoded_search, cursor)

  return results, continuation
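# Sketch of the continuation-token scheme used by __search_info: the original
# request is protobuf-encoded, base64-encoded, and joined to a Datastore
# cursor with ':'.  SearchRequest and its fields are hypothetical stand-ins.
import base64

from protorpc import messages
from protorpc import protobuf


class SearchRequest(messages.Message):
  name_prefix = messages.StringField(1)
  fetch_size = messages.IntegerField(2, default=10)
  continuation = messages.StringField(3)


request = SearchRequest(name_prefix=u'smith')
encoded_search = base64.urlsafe_b64encode(protobuf.encode_message(request))
token = u'%s:%s' % (encoded_search, 'opaque-cursor')

# Decoding splits on the first ':' (urlsafe base64 never contains one) and
# restores the original request, exactly as __search_info does.
encoded, cursor = token.split(':', 1)
restored = protobuf.decode_message(
    SearchRequest, base64.urlsafe_b64decode(encoded.encode('utf-8')))
assert restored.name_prefix == u'smith'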
def load_summary_protos():
  """Load all valid summary records from memcache.

  Returns:
    A list of RequestStatProto instances, in reverse chronological order
    (i.e. most recent first).

  NOTE: This is limited to returning at most config.KEY_MODULUS records,
  since there are only that many distinct keys.  See also make_key().
  """
  tmpl = '%s%s%s' % (recording.config.KEY_PREFIX,
                     recording.config.KEY_TEMPLATE,
                     recording.config.PART_SUFFIX)
  keys = [tmpl % i
          for i in range(0,
                         recording.config.KEY_DISTANCE *
                         recording.config.KEY_MODULUS,
                         recording.config.KEY_DISTANCE)]
  results = memcache.get_multi(keys, namespace=recording.config.KEY_NAMESPACE)
  records = []
  for rec in results.itervalues():
    try:
      pb = protobuf.decode_message(apphosting.RequestStatProto, rec)
    except Exception, err:
      logging.warn('Bad record: %s', err)
    else:
      records.append(pb)
  # Sort most recent first, as promised in the docstring.
  records.sort(key=lambda pb: -pb.start_timestamp_milliseconds())
  return records
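# Hedged usage sketch: log one line per summary, assuming the standard
# appstats RequestStatProto accessors (http_method, http_path,
# duration_milliseconds); adjust if your proto differs.
for pb in load_summary_protos():
  logging.info('%s %s %dms', pb.http_method(), pb.http_path(),
               pb.duration_milliseconds())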
def testProtobufUnrecognizedField(self):
  """Test that unrecognized fields are serialized and can be accessed."""
  decoded = protobuf.decode_message(test_util.OptionalMessage,
                                    self.unexpected_tag_message)
  self.assertEquals(1, len(decoded.all_unrecognized_fields()))
  self.assertEquals(15, decoded.all_unrecognized_fields()[0])
  self.assertEquals((5, messages.Variant.INT64),
                    decoded.get_unrecognized_field_info(15))
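# How an unrecognized field arises: encode a message that declares tag 5,
# then decode it as a narrower class that does not.  Wide and Narrow are
# illustrative classes, not part of test_util.
from protorpc import messages
from protorpc import protobuf


class Wide(messages.Message):
  value = messages.IntegerField(1)
  extra = messages.IntegerField(5)


class Narrow(messages.Message):
  value = messages.IntegerField(1)


decoded = protobuf.decode_message(
    Narrow, protobuf.encode_message(Wide(value=1, extra=15)))
# Tag 5 was not declared on Narrow, so its value is kept as unrecognized.
assert decoded.all_unrecognized_fields()[0] == 5
assert decoded.get_unrecognized_field_info(5) == (15, messages.Variant.INT64)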
def testProtobufDecodeDateTimeMessage(self):
  """Test what happens when decoding a DateTimeMessage."""
  nested = NestedDateTimeMessage()
  nested.value = message_types.DateTimeMessage(milliseconds=2500)

  value = protobuf.decode_message(HasDateTimeMessage,
                                  protobuf.encode_message(nested)).value
  self.assertEqual(datetime.datetime(1970, 1, 1, 0, 0, 2, 500000), value)
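# The arithmetic behind the expected value: DateTimeMessage carries
# milliseconds since the Unix epoch, so 2500ms decodes to a naive datetime
# 2.5 seconds past 1970-01-01 00:00:00.
import datetime

epoch = datetime.datetime(1970, 1, 1)
assert (epoch + datetime.timedelta(milliseconds=2500) ==
        datetime.datetime(1970, 1, 1, 0, 0, 2, 500000))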
def _StrToValue(self, serialized_value):
  """Deserialize an application object that was stored in memcache."""
  if not self.pb_class:
    return serialized_value
  elif self.pb_class == int:
    return int(serialized_value)
  else:
    return protobuf.decode_message(self.pb_class, serialized_value)
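# Hedged sketch of the serializer this deserializer implies; _ValueToStr is a
# hypothetical counterpart on the same cache wrapper, mirroring the three
# cases above.
def _ValueToStr(self, value):
  """Serialize an application object for storage in memcache."""
  if not self.pb_class:
    return value
  elif self.pb_class == int:
    return str(value)
  else:
    return protobuf.encode_message(value)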
def testProtobufDecodeDateTimeMessageWithTimeZone(self):
  """Test what happens when decoding a DateTimeMessage with a time zone."""
  nested = NestedDateTimeMessage()
  nested.value = message_types.DateTimeMessage(milliseconds=12345678,
                                               time_zone_offset=60)

  value = protobuf.decode_message(HasDateTimeMessage,
                                  protobuf.encode_message(nested)).value
  self.assertEqual(datetime.datetime(1970, 1, 1, 3, 25, 45, 678000,
                                     tzinfo=util.TimeZoneOffset(60)),
                   value)
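# Checking the expected value by hand: 12345678ms past the epoch is
# 1970-01-01 03:25:45.678.  Per the assertion above, the decoder attaches the
# 60-minute offset as tzinfo without shifting the clock fields.
import datetime

utc = datetime.datetime(1970, 1, 1) + datetime.timedelta(milliseconds=12345678)
assert utc == datetime.datetime(1970, 1, 1, 3, 25, 45, 678000)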
def fileset_command(options, input_filename=None):
  """Generate source directory structure from FileSet.

  Args:
    options: Parsed command line options.
    input_filename: File to read protobuf FileSet from.  If None, will read
      from stdin.
  """
  with open_input_file(input_filename) as input_file:
    descriptor_content = input_file.read()

  dest_dir = os.path.expanduser(options.dest_dir)
  if not os.path.isdir(dest_dir) and os.path.exists(dest_dir):
    fatal_error("Destination '%s' is not a directory" % dest_dir)

  file_set = protobuf.decode_message(descriptor.FileSet, descriptor_content)

  for file_descriptor in file_set.files:
    generate_file_descriptor(dest_dir, file_descriptor)
def file_command(options, input_filename=None, output_filename=None):
  """Generate a single descriptor file to Python.

  Args:
    options: Parsed command line options.
    input_filename: File to read protobuf FileDescriptor from.  If None,
      will read from stdin.
    output_filename: File to write Python source code to.  If None, will
      generate to stdout.
  """
  with open_input_file(input_filename) as input_file:
    descriptor_content = input_file.read()

  if output_filename:
    output_file = open(output_filename, 'w')
  else:
    output_file = sys.stdout

  file_descriptor = protobuf.decode_message(descriptor.FileDescriptor,
                                            descriptor_content)

  generate_python.format_python_file(file_descriptor, output_file)
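# Hedged wiring sketch for the two commands above, assuming an optparse-style
# options object; the attribute and file names here are hypothetical.
class _Options(object):
  dest_dir = '~/generated'  # consumed by fileset_command


fileset_command(_Options(), input_filename='api.fileset')
file_command(_Options(), input_filename='service.fdesc',
             output_filename='service.py')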
def load_full_proto(timestamp):
  """Load the full record for a given timestamp.

  Args:
    timestamp: The start_timestamp of the record, as a float in seconds
      (see make_key() for details).

  Returns:
    A RequestStatProto instance if the record exists and can be loaded;
    None otherwise.
  """
  full_key = recording.make_key(timestamp) + recording.config.FULL_SUFFIX
  full_binary = memcache.get(full_key,
                             namespace=recording.config.KEY_NAMESPACE)
  if full_binary is None:
    logging.info('No full record at %s', full_key)
    return None
  try:
    full = protobuf.decode_message(apphosting.RequestStatProto, full_binary)
  except Exception, err:
    logging.warn('Bad full record at %s: %s', full_key, err)
    return None
  # Return the successfully decoded record.
  return full
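# Usage sketch with a hypothetical timestamp (seconds since the epoch,
# matching what make_key() expects from the summary record).
full = load_full_proto(1300000000.125)
if full is None:
  logging.info('Record expired, never stored, or corrupt.')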
def base64_decode_proto(proto_class, blob):
  """Decode a base64-encoded serialized message into a proto_class instance."""
  return protobuf.decode_message(proto_class, base64_decode(blob))
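# Round-trip sketch, assuming base64_decode above inverts standard base64;
# Ping is a hypothetical protorpc message class.
import base64

from protorpc import messages
from protorpc import protobuf


class Ping(messages.Message):
  payload = messages.StringField(1)


blob = base64.b64encode(protobuf.encode_message(Ping(payload=u'hi')))
assert base64_decode_proto(Ping, blob).payload == u'hi'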