コード例 #1
0
ファイル: arg_parsers.py プロジェクト: wilsonmar/gapi-node
    def _Parse(self, pairs):
      """Parses KEY/VALUE pair strings into a dict.

      Each element of pairs must tokenize to exactly three parts:
      a key, the _KV_PAIR_DELIMITER separator, and a value.

      Args:
        pairs: iterable of raw "KEY<delimiter>VALUE" strings.

      Returns:
        dict mapping each key to self.ApplySpec(key, value).

      Raises:
        ArgumentParsingError: if tokenizing fails, a pair is not of the
          form KEY<delimiter>VALUE, or the same key appears twice.
      """
      res = {}
      for pair in pairs:
        try:
          parts = tokenizer.Tokenize(pair, [_KV_PAIR_DELIMITER])
        except ValueError as e:
          # str(e), not e.message: BaseException.message was deprecated in
          # Python 2.6 and removed in Python 3, where it would raise
          # AttributeError inside this error path.
          raise ArgumentParsingError(self, str(e))
        if len(parts) != 3 or parts[1] != tokenizer.Separator(
            _KV_PAIR_DELIMITER):
          raise ArgumentParsingError(
              self, _GenerateErrorMessage(
                  'key/value pair must be of the form KEY{0}VALUE'.format(
                      _KV_PAIR_DELIMITER),
                  user_input=pair))

        key, value = parts[0], parts[2]
        if key in res:
          raise ArgumentParsingError(
              self, _GenerateErrorMessage('duplicate key', user_input=key))
        res[key] = self.ApplySpec(key, value)

      return res
コード例 #2
0
def _Parse(prop):
    """Parses the given tokens that represent a property.

    A property is a dotted path of keys, where any key may be followed by
    a list slice ("[]") or a numeric index access (e.g. "[1]").
    """
    stream = [t for t in tokenizer.Tokenize(prop, ['[', ']', '.']) if t]
    if not stream:
        raise IllegalProperty('illegal property: {0}'.format(prop))

    parsed = []
    pos = 0
    end = len(stream)

    while pos < end:
        # Every segment must begin with a literal key name.
        head = stream[pos]
        if not isinstance(head, tokenizer.Literal):
            raise IllegalProperty('illegal property: {0}'.format(prop))
        parsed.append(_Key(head))
        pos += 1

        # After a key we must be at end-of-input or at a "." / "[" separator.
        if pos == end:
            break

        if not isinstance(stream[pos], tokenizer.Separator):
            raise IllegalProperty('illegal property: {0}'.format(prop))

        if stream[pos] == '[':
            # "[" must be followed by at least one more token.
            if end - pos < 2:
                raise IllegalProperty('illegal property: {0}'.format(prop))
            pos += 1

            cur = stream[pos]
            if isinstance(cur, tokenizer.Separator) and cur == ']':
                # A bare "[]" denotes a list slice.
                parsed.append(_Slice())
                pos += 1
            elif (isinstance(cur, tokenizer.Literal) and cur.isdigit()
                  and end - pos >= 2
                  and isinstance(stream[pos + 1], tokenizer.Separator)
                  and stream[pos + 1] == ']'):
                # "[<digits>]" denotes an index access.
                parsed.append(_Index(cur))
                pos += 2
            else:
                raise IllegalProperty('illegal property: {0}'.format(prop))

        # End of input after the bracket expression is fine.
        if pos == end:
            break

        # Otherwise a "." must follow, and it must not be the final token.
        if (end - pos > 1 and isinstance(stream[pos], tokenizer.Separator)
                and stream[pos] == '.'):
            pos += 1
        else:
            raise IllegalProperty('illegal property: {0}'.format(prop))

    return parsed