Example #1
def DecodeField(buffer, pos, end, message, field_dict):
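  # Note: key, local_DecodeVarint, _DecodeError, and bytestr_to_string are
  # free variables here; presumably they are bound by an enclosing
  # decoder-factory closure, as in protobuf's internal decoder module.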
  (size, pos) = local_DecodeVarint(buffer, pos)
  new_pos = pos + size
  if new_pos > end:
    raise _DecodeError('Truncated string.')
  field_dict[key] = bytestr_to_string(buffer[pos:new_pos])
  return new_pos
Example #2
def DecodeField(buffer, pos, end, message, field_dict):
    (size, pos) = local_DecodeVarint(buffer, pos)
    new_pos = pos + size
    if new_pos > end:
        raise _DecodeError('Truncated string.')
    field_dict[key] = bytestr_to_string(buffer[pos:new_pos])
    return new_pos
Example #3
    def ConsumeString(self):
        """Consumes a string value.

        Returns:
          The string parsed.

        Raises:
          ParseError: If a string value couldn't be consumed.
        """
        bytes_str = self.ConsumeByteString()
        try:
            return bytestr_to_string(bytes_str)
        except UnicodeDecodeError as e:
            raise self._StringParseError(e)
Example #4
  def ConsumeString(self):
    """Consumes a string value.

    Returns:
      The string parsed.

    Raises:
      ParseError: If a string value couldn't be consumed.
    """
    bytes_str = self.ConsumeByteString()
    try:
      return bytestr_to_string(bytes_str)
    except UnicodeDecodeError as e:
      raise self._StringParseError(e)
Example #5
def DecodeRepeatedField(buffer, pos, end, message, field_dict):
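    # Note: key, new_default, tag_bytes, tag_len, local_DecodeVarint, and
    # bytestr_to_string are free variables here; presumably they are bound
    # by an enclosing decoder-factory closure, as in protobuf's internal
    # decoder module.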
    value = field_dict.get(key)
    if value is None:
        value = field_dict.setdefault(key, new_default(message))
    while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
            raise _DecodeError('Truncated string.')
        value.append(bytestr_to_string(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
            # Prediction failed.  Return.
            return new_pos
Example #6
def DecodeRepeatedField(buffer, pos, end, message, field_dict):
  value = field_dict.get(key)
  if value is None:
    value = field_dict.setdefault(key, new_default(message))
  while 1:
    (size, pos) = local_DecodeVarint(buffer, pos)
    new_pos = pos + size
    if new_pos > end:
      raise _DecodeError('Truncated string.')
    value.append(bytestr_to_string(buffer[pos:new_pos]))
    # Predict that the next tag is another copy of the same repeated field.
    pos = new_pos + tag_len
    if buffer[new_pos:pos] != tag_bytes or new_pos == end:
      # Prediction failed.  Return.
      return new_pos
Example #7
def __str__(self):
    return bytestr_to_string(text_format.MessageToString(self))
Example #8
def _MergeField(tokenizer, message):
    """Merges a single protocol message field into a message.

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      message: A protocol message to record the data.

    Raises:
      ParseError: In case of ASCII parsing problems.
    """
    message_descriptor = message.DESCRIPTOR
    if tokenizer.TryConsume(b'['):
        name = [tokenizer.ConsumeIdentifier()]
        while tokenizer.TryConsume(b'.'):
            name.append(tokenizer.ConsumeIdentifier())
        name = '.'.join(name)

        if not message_descriptor.is_extendable:
            raise tokenizer.ParseErrorPreviousToken(
                'Message type "%s" does not have extensions.' %
                message_descriptor.full_name)
        field = message.Extensions._FindExtensionByName(name)
        if not field:
            raise tokenizer.ParseErrorPreviousToken(
                'Extension "%s" not registered.' % name)
        elif message_descriptor != field.containing_type:
            raise tokenizer.ParseErrorPreviousToken(
                'Extension "%s" does not extend message type "%s".' %
                (name, message_descriptor.full_name))
        tokenizer.Consume(b']')
    else:
        name = tokenizer.ConsumeIdentifier()
        field = message_descriptor.fields_by_name.get(name, None)

        # Group names are expected to be capitalized as they appear in the
        # .proto file, which actually matches their type names, not their field
        # names.
        if not field:
            field = message_descriptor.fields_by_name.get(name.lower(), None)
            if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
                field = None

        if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP
                and field.message_type.name != name):
            field = None

        if not field:
            raise tokenizer.ParseErrorPreviousToken(
                'Message type "%s" has no field named "%s".' %
                (message_descriptor.full_name, name))

    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        tokenizer.TryConsume(b':')

        if tokenizer.TryConsume(b'<'):
            end_token = b'>'
        else:
            tokenizer.Consume(b'{')
            end_token = b'}'

        if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
            if field.is_extension:
                sub_message = message.Extensions[field].add()
            else:
                sub_message = getattr(message, field.name).add()
        else:
            if field.is_extension:
                sub_message = message.Extensions[field]
            else:
                sub_message = getattr(message, field.name)
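            # SetInParent marks the singular sub-message as present on its
            # parent even if no fields are set on the sub-message itself.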
            sub_message.SetInParent()

        while not tokenizer.TryConsume(end_token):
            if tokenizer.AtEnd():
                raise tokenizer.ParseErrorPreviousToken(
                'Expected "%s".' % bytestr_to_string(end_token))
            _MergeField(tokenizer, sub_message)
    else:
        _MergeScalarField(tokenizer, message, field)
Example #9
def _MergeField(tokenizer, message):
  """Merges a single protocol message field into a message.

  Args:
    tokenizer: A tokenizer to parse the field name and values.
    message: A protocol message to record the data.

  Raises:
    ParseError: In case of ASCII parsing problems.
  """
  message_descriptor = message.DESCRIPTOR
  if tokenizer.TryConsume(b'['):
    name = [tokenizer.ConsumeIdentifier()]
    while tokenizer.TryConsume(b'.'):
      name.append(tokenizer.ConsumeIdentifier())
    name = '.'.join(name)

    if not message_descriptor.is_extendable:
      raise tokenizer.ParseErrorPreviousToken(
          'Message type "%s" does not have extensions.' %
          message_descriptor.full_name)
    field = message.Extensions._FindExtensionByName(name)
    if not field:
      raise tokenizer.ParseErrorPreviousToken(
          'Extension "%s" not registered.' % name)
    elif message_descriptor != field.containing_type:
      raise tokenizer.ParseErrorPreviousToken(
          'Extension "%s" does not extend message type "%s".' % (
              name, message_descriptor.full_name))
    tokenizer.Consume(b']')
  else:
    name = tokenizer.ConsumeIdentifier()
    field = message_descriptor.fields_by_name.get(name, None)

    # Group names are expected to be capitalized as they appear in the
    # .proto file, which actually matches their type names, not their field
    # names.
    if not field:
      field = message_descriptor.fields_by_name.get(name.lower(), None)
      if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
        field = None

    if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
        field.message_type.name != name):
      field = None

    if not field:
      raise tokenizer.ParseErrorPreviousToken(
          'Message type "%s" has no field named "%s".' % (
              message_descriptor.full_name, name))

  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    tokenizer.TryConsume(b':')

    if tokenizer.TryConsume(b'<'):
      end_token = b'>'
    else:
      tokenizer.Consume(b'{')
      end_token = b'}'

    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
      if field.is_extension:
        sub_message = message.Extensions[field].add()
      else:
        sub_message = getattr(message, field.name).add()
    else:
      if field.is_extension:
        sub_message = message.Extensions[field]
      else:
        sub_message = getattr(message, field.name)
      sub_message.SetInParent()

    while not tokenizer.TryConsume(end_token):
      if tokenizer.AtEnd():
        raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
                                              bytestr_to_string(end_token))
      _MergeField(tokenizer, sub_message)
  else:
    _MergeScalarField(tokenizer, message, field)
Example #10
def __str__(self):
  return bytestr_to_string(text_format.MessageToString(self))