Example #1
    def testAllJSONSerialization(self):
        expected = """\
name: Doc1
boolean: false
dict:
 xyz: 012
 abc: 897
integer: 123
list:
- a
- b
- c
- d
---
 name: Doc2
 dict:
   xyz: 789
   abc: 456
 integer: 456
 list:
 - e
 - f
 - g
 - h
"""
        data = list(yaml.load_all(expected, round_trip=True))
        self.assertEqual(len(data), 2)
        json_out = yaml.dump_all_round_trip(data)
        data_after = list(yaml.load_all(json_out, round_trip=True))
        self.assertCountEqual(data, data_after)
Example #2
def CreatePodAndService(service_name, image_name):
    """Create a pod and service specification for a service.

  Args:
    service_name: Name of the service.
    image_name: Image tag.

  Returns:
    List of dictionary objects representing the service and image yaml.
  """
    yaml_text = _POD_AND_SERVICES_TEMPLATE.format(service=service_name,
                                                  image=image_name)
    return list(yaml.load_all(yaml_text))
Example #3
def RecordSetsFromYamlFile(yaml_file,
                           include_extended_records=False,
                           api_version='v1'):
    """Returns record-sets read from the given yaml file.

  Args:
    yaml_file: file, A yaml file with records.
    include_extended_records: [bool], If extended records should be included
      (otherwise they are silently skipped).
    api_version: [str], the api version to use for creating the records.

  Returns:
    A (name, type) keyed dict of ResourceRecordSets that were obtained from the
    yaml file. Note that only records of supported types are retrieved. Also,
    the primary NS field for SOA records is discarded since that is
    provided by Cloud DNS.
  """
    record_sets = {}
    messages = core_apis.GetMessagesModule('dns', api_version)

    yaml_record_sets = yaml.load_all(yaml_file)
    for yaml_record_set in yaml_record_sets:
        rdata_type = _ToStandardEnumTypeSafe(yaml_record_set['type'])
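        # Skip record types Cloud DNS does not support, unless extended record
        # types were explicitly requested and this is one of them.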
        if rdata_type not in record_types.SUPPORTED_TYPES and (
                not include_extended_records or yaml_record_set['type']
                not in record_types.CLOUD_DNS_EXTENDED_TYPES):
            continue

        record_set = messages.ResourceRecordSet()
        # Need to assign kind to default value for useful equals comparisons.
        record_set.kind = record_set.kind
        record_set.name = yaml_record_set['name']
        record_set.ttl = yaml_record_set['ttl']
        record_set.type = yaml_record_set['type']
        record_set.rrdatas = yaml_record_set['rrdatas']

        if rdata_type is rdatatype.SOA:
            # Make primary NS name substitutable.
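            # Replace only the first whitespace-delimited token (the primary
            # NS name) with the '{0}' placeholder.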
            record_set.rrdatas[0] = re.sub(r'\S+',
                                           '{0}',
                                           record_set.rrdatas[0],
                                           count=1)

        record_sets[(record_set.name, record_set.type)] = record_set

    return record_sets
Example #4
    def testCreateServiceAccountCredential(self):
        refresh_token = e2e_base.RefreshTokenAuth()
        local_credential_variable = EnvironmentVariable(
            'LOCAL_CREDENTIAL_PATH', _LOCAL_CREDENTIAL_FILE_PATH)

        pod_and_services_path = os.path.join(_LOCAL_DEVELOPMENT_DIR,
                                             'pods_and_services.yaml')
        with refresh_token as auth, local_credential_variable as _:
            command = (
                'code export --project {0} --kubernetes-file={1} '
                '--skaffold-file={2} --service-account={3} --dockerfile={4}'
            ).format(auth.Project(), pod_and_services_path,
                     _SKAFFOLD_FILE_PATH, self.local_account_email,
                     self.docker_file)
            self.Run(command)
            self.WriteInput('y')

        with open(pod_and_services_path) as pods_and_services_file:
            pods_and_services = list(yaml.load_all(pods_and_services_file))

        pod_specs = [
            spec for spec in pods_and_services if spec['kind'] == 'Deployment'
        ]
        self.assertGreaterEqual(len(pod_specs), 1)
        for spec in pod_specs:
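            # Gather the environment variable entries found under
            # spec.template.spec.containers[*].env in this Deployment spec.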
            env_vars = yaml_helper.GetAll(spec,
                                          path=('spec', 'template', 'spec',
                                                'containers', 'env'))
            credential_vars = (
                var['value'] for var in env_vars
                if var['name'] == 'GOOGLE_APPLICATION_CREDENTIALS')
            env_var_path = next(credential_vars, None)
            self.assertEqual(
                env_var_path, '/etc/local_development_credential/'
                'local_development_service_account.json')

        secret_specs = [
            spec for spec in pods_and_services if spec['kind'] == 'Secret'
        ]
        self.assertEqual(len(secret_specs), 1)
        self.assertEqual(secret_specs[0]['metadata']['name'],
                         'local-development-credential')
Example #5
def RecordSetsFromYamlFile(yaml_file, api_version='v1'):
    """Returns record-sets read from the given yaml file.

  Args:
    yaml_file: file, A yaml file with records.
    api_version: [str], the api version to use for creating the records.

  Returns:
    A (name, type) keyed dict of ResourceRecordSets that were obtained from the
    yaml file. Note that only A, AAAA, CNAME, MX, PTR, SOA, SPF, SRV, and TXT
    record-sets are retrieved. Other record-set types are not supported by Cloud
    DNS. Also, the master NS field for SOA records is discarded since that is
    provided by Cloud DNS.
  """
    record_sets = {}
    messages = core_apis.GetMessagesModule('dns', api_version)

    yaml_record_sets = yaml.load_all(yaml_file)
    for yaml_record_set in yaml_record_sets:
        rdata_type = rdatatype.from_text(yaml_record_set['type'])
        if GetRdataTranslation(rdata_type) is None:
            continue

        record_set = messages.ResourceRecordSet()
        # Need to assign kind to default value for useful equals comparisons.
        record_set.kind = record_set.kind
        record_set.name = yaml_record_set['name']
        record_set.ttl = yaml_record_set['ttl']
        record_set.type = yaml_record_set['type']
        record_set.rrdatas = yaml_record_set['rrdatas']

        if rdata_type is rdatatype.SOA:
            # Make master NS name substitutable.
            record_set.rrdatas[0] = re.sub(r'\S+',
                                           '{0}',
                                           record_set.rrdatas[0],
                                           count=1)

        record_sets[(record_set.name, record_set.type)] = record_set

    return record_sets
Example #6
  def __init__(self, item_type, file_contents=None, file_path=None):
    self._file_contents = file_contents
    self._file_path = file_path
    self._item_type = item_type

    if not self._file_contents and not self._file_path:
      raise YamlConfigFileError('Could Not Initialize YamlConfigFile: '
                                'file_contents And file_path Are Both Empty')
    # Priority is to try to load from contents if specified. Else from file.
    if self._file_contents:
      try:
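        # round_trip=True is intended to preserve document structure (e.g. key
        # order) so the items can later be written back largely unchanged.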
        items = yaml.load_all(self._file_contents, round_trip=True)
        self._data = [item_type(x) for x in items]
      except yaml.YAMLParseError as fe:
        raise YamlConfigFileError('Error Parsing Config File: [{}]'.format(fe))
    elif self._file_path:
      try:
        items = yaml.load_all_path(self._file_path, round_trip=True)
        self._data = [item_type(x) for x in items]
      except yaml.FileLoadError as fe:
        raise YamlConfigFileError('Error Loading Config File: [{}]'.format(fe))
Example #7
def RecordSetsFromYamlFile(yaml_file, api_version='v1'):
  """Returns record-sets read from the given yaml file.

  Args:
    yaml_file: file, A yaml file with records.
    api_version: [str], the api version to use for creating the records.

  Returns:
    A (name, type) keyed dict of ResourceRecordSets that were obtained from the
    yaml file. Note that only A, AAAA, CNAME, MX, PTR, SOA, SPF, SRV, and TXT
    record-sets are retrieved. Other record-set types are not supported by Cloud
    DNS. Also, the master NS field for SOA records is discarded since that is
    provided by Cloud DNS.
  """
  record_sets = {}
  messages = core_apis.GetMessagesModule('dns', api_version)

  yaml_record_sets = yaml.load_all(yaml_file)
  for yaml_record_set in yaml_record_sets:
    rdata_type = rdatatype.from_text(yaml_record_set['type'])
    if GetRdataTranslation(rdata_type) is None:
      continue

    record_set = messages.ResourceRecordSet()
    # Need to assign kind to default value for useful equals comparisons.
    record_set.kind = record_set.kind
    record_set.name = yaml_record_set['name']
    record_set.ttl = yaml_record_set['ttl']
    record_set.type = yaml_record_set['type']
    record_set.rrdatas = yaml_record_set['rrdatas']

    if rdata_type is rdatatype.SOA:
      # Make master NS name substitutable.
      record_set.rrdatas[0] = re.sub(r'\S+', '{0}', record_set.rrdatas[0],
                                     count=1)

    record_sets[(record_set.name, record_set.type)] = record_set

  return record_sets
Example #8
def LoadMessagesFromStream(stream,
                           msg_type,
                           msg_friendly_name,
                           skip_camel_case=None,
                           path=None):
    """Load multiple proto message from a stream of JSON or YAML text.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in error
      messages.
    skip_camel_case: Contains proto field names or map keys whose values should
      not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream.
    ParseProtoException: If there was a problem interpreting the stream as the
      given message type.

  Returns:
    A list of the decoded proto messages.
  """
    if skip_camel_case is None:
        skip_camel_case = []
    # Turn each document in the stream into a dict.
    try:
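        # Parse every document in the stream; per the docstring, `path` is
        # only used (as file_hint) for error reporting.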
        structured_data = yaml.load_all(stream, file_hint=path)
    except yaml.Error as e:
        raise ParserError(path, e.inner_error)

    return [
        _YamlToMessage(item, msg_type, msg_friendly_name, skip_camel_case,
                       path) for item in structured_data
    ]