Example #1
def CreateDeployment(service_name,
                     image_name,
                     memory_limit=None,
                     cpu_limit=None,
                     cpu_request=None):
  """Create a deployment specification for a service.

  Args:
    service_name: Name of the service.
    image_name: Image tag.
    memory_limit: Container memory limit.
    cpu_limit: Container cpu limit.
    cpu_request: Container cpu request.

  Returns:
    Dictionary object representing the deployment yaml.
  """
  deployment = yaml.load(_POD_TEMPLATE.format(service=service_name))
  container = yaml.load(
      _CONTAINER_TEMPLATE.format(service=service_name, image=image_name))
  if memory_limit is not None:
    limits = yaml_helper.GetOrCreate(container, ('resources', 'limits'))
    limits['memory'] = memory_limit
  if cpu_limit is not None:
    limits = yaml_helper.GetOrCreate(container, ('resources', 'limits'))
    limits['cpu'] = six.text_type(cpu_limit)
  if cpu_request is not None:
    requests = yaml_helper.GetOrCreate(container, ('resources', 'requests'))
    requests['cpu'] = six.text_type(cpu_request)
  containers = yaml_helper.GetOrCreate(
      deployment, ('spec', 'template', 'spec', 'containers'), constructor=list)
  containers.append(container)

  return deployment
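A minimal usage sketch (hypothetical values; assumes the module-level _POD_TEMPLATE and _CONTAINER_TEMPLATE constants and the yaml/yaml_helper imports used above):

deployment = CreateDeployment('my-service', 'gcr.io/my-project/my-image',
                              memory_limit='512Mi',
                              cpu_limit=2,
                              cpu_request=1)
# The container lands under spec.template.spec.containers with
# resources.limits.memory == '512Mi', resources.limits.cpu == '2' and
# resources.requests.cpu == '1' (cpu values are stringified via six).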
Example #2
    def testAddSecret(self):
        yaml_text = textwrap.dedent("""\
    apiVersion: v1
    kind: Deployment
    metadata:
      name: my-service
    labels:
      service: my-service
    spec:
      template:
        spec:
          containers:
          - name: my-service-container
            image: image-name
            env:
            - name: PORT
              value: "8080"
            ports:
            - containerPort: 8080
    """)
        deployment = yaml.load(yaml_text)
        credential_generator = local.CredentialGenerator(lambda: None)
        credential_generator.ModifyDeployment(deployment)
        credential_generator.ModifyContainer(
            deployment['spec']['template']['spec']['containers'][0])

        expected_yaml_text = textwrap.dedent("""\
    apiVersion: v1
    kind: Deployment
    metadata:
      name: my-service
    labels:
      service: my-service
    spec:
      template:
        spec:
          containers:
          - name: my-service-container
            image: image-name
            env:
            - name: PORT
              value: "8080"
            - name: GOOGLE_APPLICATION_CREDENTIALS
              value: /etc/local_development_credential/local_development_service_account.json
            ports:
            - containerPort: 8080
            volumeMounts:
            - mountPath: /etc/local_development_credential
              name: local-development-credential
              readOnly: true
          volumes:
          - name: local-development-credential
            secret:
              secretName: local-development-credential
    """)
        self.assertEqual(deployment, yaml.load(expected_yaml_text))
Example #3
def ProcessScalingConfigsFile(scaling_configs_file):
  """Reads a JSON/YAML scaling_configs_file and returns collectiong of scaling configs object."""

  try:
    scaling_configs = json.loads(scaling_configs_file[0])
  except ValueError as e:
    try:
      scaling_configs = yaml.load(scaling_configs_file[0])
    except yaml.YAMLParseError as e:
      raise InvalidSpecFileError(
          'Error parsing scaling_configs file: [{}]'.format(e))

  messages = utils.GetMessages()
  message_class = messages.ScalingConfig
  selector = messages.LabelSelector()
  scaling_configs_message = []
  try:
    for sc in scaling_configs:
      esc = encoding.DictToMessage(sc, message_class)
      if not esc.selectors:
        # Add default selector if not set
        esc.selectors = [selector]
      # Convert yaml to json
      spec = yaml.load(esc.fleetAutoscalerSpec)
      spec_as_json_str = json.dumps(spec)
      esc.fleetAutoscalerSpec = spec_as_json_str
      scaling_configs_message.append(esc)
  except AttributeError:
    raise InvalidSchemaError(
        'Invalid schema: expected proper scaling configs')
  except _messages.ValidationError as e:
    # The most likely reason this is raised is that the submitted file follows
    # the new format (JSON/YAML without a string blob), so we parse it with
    # the new format.
    for sc in scaling_configs:
      s = messages.ScalingConfig()
      if 'selectors' in sc:
        s.selectors = sc['selectors']
      else:
        # Add default selector if not set
        s.selectors = [selector]
      if 'name' in sc:
        s.name = sc['name']
      if 'schedules' in sc:
        s.schedules = sc['schedules']
      if 'fleetAutoscalerSpec' not in sc:
        raise InvalidSchemaError(
            'Invalid schema: expected proper scaling configs')
      spec_as_json_str = json.dumps(sc['fleetAutoscalerSpec'])
      s.fleetAutoscalerSpec = spec_as_json_str
      scaling_configs_message.append(s)
  return scaling_configs_message
Example #4
    def testIamCommands(self):
        cryptokey = next(self.cryptokey_namer)

        self.RunKms('keys', 'create', cryptokey, '--keyring', self.keyring,
                    '--location', self.glbl, '--purpose', 'encryption')

        self.RunKms('keys', 'get-iam-policy', cryptokey, '--keyring',
                    self.keyring, '--location', self.glbl)
        # default expected output for new cryptokey
        self.AssertOutputContains('etag: ACAB')
        self.ClearOutput()

        policy_file = self.Touch(self.temp_path,
                                 contents="""{{
  "etag": "ACAB",
  "bindings": [ {{ "members": ["serviceAccount:{0}"], "role": "roles/owner" }} ]
}}
""".format(self.Account()))
        self.RunKms('keys', 'set-iam-policy', cryptokey, '--keyring',
                    self.keyring, '--location', self.glbl, policy_file)
        etag = yaml.load(self.GetOutput())['etag']
        self.ClearOutput()

        files.WriteFileContents(policy_file, """{{
  "etag": "{0}"
}}
""".format(etag))
        self.RunKms('keys', 'set-iam-policy', cryptokey, '--keyring',
                    self.keyring, '--location', self.glbl, policy_file)
        # "bindings" is not mentioned, so it should be unchanged.
        self.AssertOutputContains("""bindings:
- members:
  - serviceAccount:{0}
  role: roles/owner
""".format(self.Account()))
        etag = yaml.load(self.GetOutput())['etag']
        self.ClearOutput()

        files.WriteFileContents(
            policy_file, """{{
  "etag": "{0}",
  "bindings": []
}}
""".format(etag))
        self.RunKms('keys', 'set-iam-policy', cryptokey, '--keyring',
                    self.keyring, '--location', self.glbl, policy_file)
        # "bindings" is set to [], so all entries should be removed.
        self.AssertOutputNotContains('bindings:')
Example #5
def ProcessFleetConfigsFile(fleet_configs_file):
    """Reads a JSON/YAML fleet_configs_file and returns collectiong of fleet configs object."""
    try:
        fleet_configs = json.loads(fleet_configs_file[0])
    except ValueError as e:
        try:
            fleet_configs = yaml.load(fleet_configs_file[0])
        except yaml.YAMLParseError as e:
            raise InvalidSpecFileError(
                'Error parsing fleet_configs file: [{}]'.format(e))

    messages = utils.GetMessages()
    message_class = messages.FleetConfig
    fleet_configs_message = []
    try:
        for fc in fleet_configs:
            f = encoding.DictToMessage(fc, message_class)
            spec = yaml.load(f.fleetSpec)
            spec_as_json_str = json.dumps(spec)
            f.fleetSpec = spec_as_json_str
            fleet_configs_message.append(f)
    except AttributeError:
        raise InvalidSchemaError(
            'Invalid schema: expected proper fleet configs')
    except _messages.ValidationError as e:
        # The most likely reason this is raised is that the submitted file
        # follows the new format (JSON/YAML without a string blob), so we
        # parse it with the new format.
        for fc in fleet_configs:
            f = messages.FleetConfig()
            if 'name' in fc:
                f.name = fc['name']
            if 'fleetSpec' not in fc:
                raise InvalidSchemaError(
                    'Invalid schema: expected proper fleet configs')
            spec_as_json_str = json.dumps(fc['fleetSpec'])
            f.fleetSpec = spec_as_json_str
            fleet_configs_message.append(f)
    unrecognized_field_paths = _GetUnrecognizedFieldPaths(
        fleet_configs_message)
    if unrecognized_field_paths:
        error_msg_lines = [
            'Invalid schema, the following fields are unrecognized:'
        ]
        error_msg_lines += unrecognized_field_paths
        raise InvalidSchemaError('\n'.join(error_msg_lines))

    return fleet_configs_message
Example #6
def ChangeFromYamlFile(yaml_file, api_version='v1'):
  """Returns the change contained in the given yaml file.

  Args:
    yaml_file: file, A yaml file with change.
    api_version: [str], the api version to use for creating the change object.

  Returns:
    Change, the change contained in the given yaml file.

  Raises:
    CorruptedTransactionFileError: if the record_set_dictionaries are invalid
  """
  messages = apis.GetMessagesModule('dns', api_version)
  try:
    change_dict = yaml.load(yaml_file) or {}
  except yaml.YAMLParseError:
    raise CorruptedTransactionFileError()
  if (change_dict.get('additions') is None or
      change_dict.get('deletions') is None):
    raise CorruptedTransactionFileError()
  change = messages.Change()
  change.additions = _RecordSetsFromDictionaries(
      messages, change_dict['additions'])
  change.deletions = _RecordSetsFromDictionaries(
      messages, change_dict['deletions'])
  return change
Example #7
def ProcessScalingConfigsFile(scaling_configs_file):
  """Reads a JSON/YAML scaling_configs_file and returns a collection of scaling config objects."""

  try:
    scaling_configs = json.loads(scaling_configs_file[0])
  except ValueError as e:
    try:
      scaling_configs = yaml.load(scaling_configs_file[0])
    except yaml.YAMLParseError as e:
      raise InvalidSpecFileError(
          'Error parsing scaling_configs file: [{}]'.format(e))

  messages = utils.GetMessages()
  message_class = messages.ScalingConfig
  try:
    selector = messages.LabelSelector()
    scaling_configs_message = []
    for sc in scaling_configs:
      esc = encoding.DictToMessage(sc, message_class)
      if not esc.selectors:
        # Add default selector if not set
        esc.selectors = [selector]
      scaling_configs_message.append(esc)

  except AttributeError:
    raise InvalidSchemaError(
        'Invalid schema: expected proper scaling configs')
  except _messages.ValidationError as e:
    # Unfortunately apitools doesn't provide a way to get the path to the
    # invalid field here.
    raise InvalidSchemaError('Invalid schema: [{}]'.format(e))

  return scaling_configs_message
Example #8
def _GetYamlImports(import_object):
    """Extract the import section of a file.

  Args:
    import_object: The object in which to look for imports.

  Returns:
    A list of dictionary objects, containing the keys 'path' and 'name' for each
    file to import. If no name was found, we populate it with the value of path.

  Raises:
   ConfigError: If we cannot read the file, the yaml is malformed, or
       the import object does not contain a 'path' field.
  """
    content = import_object.GetContent()
    yaml_content = yaml.load(content)
    imports = []
    if yaml_content and IMPORTS in yaml_content:
        imports = yaml_content[IMPORTS]
        # Validate the yaml imports, and make sure the optional name is set.
        for i in imports:
            if PATH not in i:
                raise exceptions.ConfigError(
                    'Missing required field %s in import in file %s.' %
                    (PATH, import_object.full_path))
            # Populate the name field.
            if NAME not in i:
                i[NAME] = i[PATH]
    return imports
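For illustration, if the imported file's content were the YAML below, the function would return the following list (a sketch; IMPORTS, PATH and NAME are assumed to be the module constants 'imports', 'path' and 'name'):

# Content of the file being scanned:
#   imports:
#   - path: vm.jinja
#   - path: net.jinja
#     name: network
#
# _GetYamlImports(import_object) ->
#   [{'path': 'vm.jinja', 'name': 'vm.jinja'},
#    {'path': 'net.jinja', 'name': 'network'}]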
Example #9
def _GetYamlImports(import_object, globbing_enabled=False):
    """Extract the import section of a file.

  If the glob_imports config is set to true, expand any globs (e.g. *.jinja).
  Named imports cannot be used with globs that expand to more than one file.
  If globbing is disabled or a glob pattern does not expand to match any files,
  importer will use the literal string as the file path.

  Args:
    import_object: The object in which to look for imports.
    globbing_enabled: If true, glob patterns will be resolved dynamically.

  Returns:
    A list of dictionary objects, containing the keys 'path' and 'name' for each
    file to import. If no name was found, we populate it with the value of path.

  Raises:
   ConfigError: If we cannot read the file, the yaml is malformed, or
       the import object does not contain a 'path' field.
  """
    parent_dir = None
    if not _IsUrl(import_object.full_path):
        parent_dir = os.path.dirname(os.path.abspath(import_object.full_path))
    content = import_object.GetContent()
    yaml_content = yaml.load(content)
    imports = []
    if yaml_content and IMPORTS in yaml_content:
        raw_imports = yaml_content[IMPORTS]
        # Validate the yaml imports, and make sure the optional name is set.
        for i in raw_imports:
            if PATH not in i:
                raise exceptions.ConfigError(
                    'Missing required field %s in import in file %s.' %
                    (PATH, import_object.full_path))
            glob_matches = []
            # Only expand globs if config set and the path is a local fs reference.
            if globbing_enabled and parent_dir and not _IsUrl(i[PATH]):
                # Set our working dir to the import_object's for resolving globs.
                with files.ChDir(parent_dir):
                    # TODO(b/111880973): Replace with gcloud glob supporting ** wildcards.
                    glob_matches = glob.glob(i[PATH])
                    glob_matches = _SanitizeWindowsPathsGlobs(glob_matches)
                # Multiple file case.
                if len(glob_matches) > 1:
                    if NAME in i:
                        raise exceptions.ConfigError(
                            'Cannot use import name %s for path glob in file '
                            '%s that matches multiple objects.' %
                            (i[NAME], import_object.full_path))
                    imports.extend([{NAME: g, PATH: g} for g in glob_matches])
                    continue
            # Single file case. (URL, discrete file, or single glob match)
            if len(glob_matches) == 1:
                i[PATH] = glob_matches[0]
            # Populate the name field.
            if NAME not in i:
                i[NAME] = i[PATH]
            imports.append(i)
    return imports
Example #10
def GetFileAsMessage(path, message):
  """Reads a YAML or JSON object of type message from local path.

  Args:
    path: A local path to an object specification in YAML or JSON format.
    message: The message type to be parsed from the file.

  Returns:
    Object of type message, if successful.
  Raises:
    files.Error, exceptions.ResourceManagerInputFileError
  """
  in_text = files.GetFileContents(path)
  if not in_text:
    raise exceptions.ResourceManagerInputFileError(
        'Empty policy file [{0}]'.format(path))

  # Parse it, first trying YAML then JSON.
  try:
    result = encoding.PyValueToMessage(message, yaml.load(in_text))
  except (ValueError, AttributeError, yaml.YAMLParseError):
    try:
      result = encoding.JsonToMessage(message, in_text)
    except (ValueError, DecodeError) as e:
      # ValueError is raised when JSON is badly formatted
      # DecodeError is raised when a tag is badly formatted (not Base64)
      raise exceptions.ResourceManagerInputFileError(
          'Policy file [{0}] is not properly formatted YAML or JSON '
          'due to [{1}]'.format(path, str(e)))
  return result
Example #11
def LoadTPUResourceSpecs(custom_help=None):
    """Read Yaml resource file and return a dict mapping name to resource spec."""
    resource_file_contents = pkg_resources.GetResource(TPU_YAML_RESOURCE_PATH,
                                                       'resources.yaml')
    if not resource_file_contents:
        raise calliope_exceptions.BadFileException(
            'Resources not found in path [{}]'.format(TPU_YAML_RESOURCE_PATH))

    resource_dict = yaml.load(resource_file_contents)
    resource_specs = []
    for resource_name in TPU_YAML_SPEC_TEMPLATE:
        spec = resource_dict.get(resource_name, None)
        if not spec:
            raise ValueError(
                'Resource spec [{}] not found in resource spec {}.yaml'.format(
                    resource_name, TPU_YAML_RESOURCE_PATH))

        # Don't modify template
        temp_spec = copy.deepcopy(TPU_YAML_SPEC_TEMPLATE[resource_name])

        temp_spec['spec'] = spec
        if custom_help and custom_help.get(resource_name):
            temp_spec['help_text'] = custom_help[resource_name]
        resource_specs.append(
            resource_arg_schema.YAMLResourceArgument.FromData(temp_spec))
    return resource_specs
Example #12
 def testLoadManifest(self):
     self.Touch(self.temp_path,
                'runtimes.yaml',
                contents=ManifestTest.MANIFEST_FILE)
     self.StartObjectPatch(storage_api.StorageClient,
                           'ReadObject',
                           side_effect=lambda x: io.BytesIO(
                               ManifestTest.MANIFEST_FILE.encode('utf-8')))
     for builder_root in [_URLFromFile(self.temp_path), 'gs://mybucket']:
         m = runtime_builders.Manifest.LoadFromURI(builder_root +
                                                   '/runtimes.yaml')
         self.assertEqual(m._data, yaml.load(ManifestTest.MANIFEST_FILE))
         self.assertEqual(
             set(m.Runtimes()),
             {'erlang', 'erlang-1', 'erlang-1.2', 'erlang-0', 'erlang-0.1'})
         self.assertIsNone(m.GetBuilderReference('foo'))
         self.assertEqual(
             m.GetBuilderReference('erlang-1.2'),
             runtime_builders.BuilderReference(
                 'erlang-1.2', builder_root + '/erlang-1.2-12345.yaml'))
         self.assertEqual(
             m.GetBuilderReference('erlang'),
             runtime_builders.BuilderReference(
                 'erlang-1.2', builder_root + '/erlang-1.2-12345.yaml'))
         self.assertEqual(
             m.GetBuilderReference('erlang-0'),
             runtime_builders.BuilderReference('erlang-0.1', None,
                                               'erlang-0.1 is deprecated.'))
Example #13
 def testMakeProcess_Distributed(self):
     package_dir = self.Resource('tests', 'unit', 'command_lib',
                                 'ml_engine', 'test_data', 'package_root')
     run_root = os.path.join(self.temp_path, 'run_dir')
     shutil.copytree(package_dir, run_root)
     module_name = 'test_package.test_task'
     out = io.BytesIO()
     args = ['foo']
     cluster = {'distributed': ['address_1']}
     stdout, _ = local_train.MakeProcess(
         module_name,
         run_root,
         args=args,
         task_type='distributed',
         index=0,
         cluster=cluster,
         stdout=subprocess.PIPE).communicate()
     out.write(stdout)
     self.assertEqual(
         yaml.load(out.getvalue()), {
             'TF_CONFIG': {
                 'job': {
                     'job_name': module_name,
                     'args': args
                 },
                 'task': {
                     'type': 'distributed',
                     'index': 0
                 },
                 'cluster': cluster,
                 'environment': 'cloud',
             },
             'PWD': run_root,
             'ARGS': ['foo']
         })
Example #14
def GetFileAsMessage(path, message):
  """Reads a YAML or JSON object of type message from local path.

  Args:
    path: A local path to an object specification in YAML or JSON format.
    message: The message type to be parsed from the file.

  Returns:
    Object of type message, if successful.
  Raises:
    files.Error, exceptions.ResourceManagerInputFileError
  """
  in_text = files.ReadFileContents(path)
  if not in_text:
    raise exceptions.ResourceManagerInputFileError(
        'Empty policy file [{0}]'.format(path))

  # Parse it, first trying YAML then JSON.
  try:
    result = encoding.PyValueToMessage(message, yaml.load(in_text))
  except (ValueError, AttributeError, yaml.YAMLParseError):
    try:
      result = encoding.JsonToMessage(message, in_text)
    except (ValueError, DecodeError) as e:
      # ValueError is raised when JSON is badly formatted
      # DecodeError is raised when a tag is badly formatted (not Base64)
      raise exceptions.ResourceManagerInputFileError(
          'Policy file [{0}] is not properly formatted YAML or JSON '
          'due to [{1}]'.format(path, six.text_type(e)))
  return result
Example #15
def _ParseWeeklyCycleFromFile(args, messages):
    """Parses WeeklyCycle message from file contents specified in args."""
    weekly_cycle_dict = yaml.load(args.weekly_cycle_from_file)
    day_enum = messages.ResourcePolicyWeeklyCycleDayOfWeek.DayValueValuesEnum
    days_of_week = []
    for day_and_time in weekly_cycle_dict:
        if 'day' not in day_and_time or 'startTime' not in day_and_time:
            raise exceptions.InvalidArgumentException(
                args.GetFlag('weekly_cycle_from_file'),
                'Each JSON/YAML object in the list must have the following keys: '
                '[day, startTime].')
        day = day_and_time['day'].upper()
        try:
            weekday = times.Weekday.Get(day)
        except KeyError:
            raise exceptions.InvalidArgumentException(
                args.GetFlag('weekly_cycle_from_file'),
                'Invalid value for `day`: [{}].'.format(day))
        start_time = arg_parsers.Datetime.ParseUtcTime(
            day_and_time['startTime'])
        day, start_time = _ParseWeeklyDayAndTime(start_time, weekday)
        days_of_week.append(
            messages.ResourcePolicyWeeklyCycleDayOfWeek(day=day_enum(day),
                                                        startTime=start_time))
    return messages.ResourcePolicyWeeklyCycle(dayOfWeeks=days_of_week)
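An input file for the flag parsed above might look like this (a sketch; the exact startTime format accepted by arg_parsers.Datetime.ParseUtcTime is an assumption):

# weekly-cycle.yaml -- a list of {day, startTime} objects:
# - day: monday
#   startTime: 04:00
# - day: friday
#   startTime: 12:00
# 'day' is upper-cased and matched against times.Weekday; both keys are
# required, or InvalidArgumentException is raised.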
Example #16
def ProcessFleetConfigsFile(fleet_configs_file):
  """Reads a JSON/YAML fleet_configs_file and returns a collection of fleet config objects."""
  try:
    fleet_configs = json.loads(fleet_configs_file[0])
  except ValueError as e:
    try:
      fleet_configs = yaml.load(fleet_configs_file[0])
    except yaml.YAMLParseError as e:
      raise InvalidSpecFileError(
          'Error parsing fleet_configs file: [{}]'.format(e))

  messages = utils.GetMessages()
  message_class = messages.FleetConfig
  try:
    fleet_configs_message = [encoding.DictToMessage(fc, message_class)
                             for fc in fleet_configs]
  except AttributeError:
    raise InvalidSchemaError(
        'Invalid schema: expected proper fleet configs')
  except _messages.ValidationError as e:
    # Unfortunately apitools doesn't provide a way to get the path to the
    # invalid field here.
    raise InvalidSchemaError('Invalid schema: [{}]'.format(e))
  unrecognized_field_paths = _GetUnrecognizedFieldPaths(fleet_configs_message)
  if unrecognized_field_paths:
    error_msg_lines = ['Invalid schema, the following fields are unrecognized:']
    error_msg_lines += unrecognized_field_paths
    raise InvalidSchemaError('\n'.join(error_msg_lines))

  return fleet_configs_message
Example #17
  def ListResources(self, project=None, organization=None, folder=None):
    """List all exportable resources.

    If parent (e.g. project, organization or folder) is passed then only list
    the exportable resources for that parent.

    Args:
      project: string, project to list exportable resources for.
      organization: string, organization to list exportable resources for.
      folder: string, folder to list exportable resources for.

    Returns:
      Formatted output listing the supported exportable resources.

    """
    if not (project or organization or folder):
      yaml_obj_list = yaml.load(
          self._CallPrintResources(output_format='yaml'), round_trip=True)
      return yaml_obj_list
    if project:
      msg_sfx = ' for project [{}]'.format(project)
    elif organization:
      msg_sfx = ' for organization [{}]'.format(organization)
    else:
      msg_sfx = ' for folder [{}]'.format(folder)

    with progress_tracker.ProgressTracker(
        message='Listing exportable resource types' + msg_sfx,
        aborted_message='Aborted Export.'):
      supported_kinds = self.ListSupportedResourcesForParent(
          project=project, organization=organization, folder=folder)
      supported_kinds = [x.AsDict() for x in supported_kinds]
      return supported_kinds
Example #18
  def testBlockText(self):
    data_string = """\
a:
  b: "this\\nis\\nsomething\\nwith\\nnewlines"
  c: d
"""
    data = yaml.load(data_string, round_trip=True, version=yaml.VERSION_1_2)
    self.assertEqual(
        'a:\n  b: "this\\nis\\nsomething\\nwith\\nnewlines"\n  c: d\n',
        yaml.dump(data, round_trip=True))
    context = updates.Context(data, 'a', updates.Mode.RESULT)
    context.ForKey('b').Update('this\nis\na\ndifferent\nthing\nwith\nnewlines',
                               [updates.Mode.RESULT])
    self.assertEqual("""\
a:
  b: |-
    this
    is
    a
    different
    thing
    with
    newlines
  c: d
""", yaml.dump(context.BackingData(), round_trip=True))
Example #19
def ParseReplicationFileContents(file_contents):
    """Reads replication policy file contents and returns its data.

  Reads the contents of a json or yaml replication policy file which conforms to
  https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication
  and returns data needed to create a Secret with that policy. If the file
  doesn't conform to the expected format, a BadFileException is raised.

  For Secrets with an automatic policy, locations is empty and keys has
  either 0 or 1 entry depending on whether the policy includes CMEK. For
  Secrets with a user-managed policy, the number of keys returned is either 0
  or equal to the number of locations returned, with the Nth key corresponding
  to the Nth location.

  Args:
      file_contents (str): The unvalidated contents of the replication file.

  Returns:
      result (str): Either "user-managed" or "automatic".
      locations (list): Locations that are part of the user-managed replication
      keys (list): list of kms keys to be used for each replica.
  """
    try:
        replication_policy = json.loads(file_contents)
        return _ParseReplicationDict(replication_policy)
    except ValueError:
        # Assume that this is yaml.
        pass
    try:
        replication_policy = yaml.load(file_contents)
        return _ParseReplicationDict(replication_policy)
    except yaml.YAMLParseError:
        raise exceptions.BadFileException(
            'Failed to parse replication policy file as json or yaml.')
Example #20
def ReadInstanceFromArgs(path):
  """Reads the instance from the given file path ('-' for stdin).

  Args:
    path: str or None, a path to a file ('-' for stdin) containing the JSON
      body.

  Returns:
    An instance.

  Raises:
    InvalidInstancesFileError: If the input file is invalid (invalid format or
        contains too many/zero instances), or an improper combination of input
        files was given.
  """
  data = console_io.ReadFromFileOrStdin(path, binary=True)
  with io.BytesIO(data) as f:
    try:
      instance = yaml.load(f)
    except ValueError:
      raise errors.InvalidInstancesFileError(
          'Input instance is not in JSON format. '
          'See `gcloud ai model-monitoring-jobs create --help` for details.')

    if not isinstance(instance, dict):
      raise errors.InvalidInstancesFileError(
          'Input instance is not in JSON format. '
          'See `gcloud ai model-monitoring-jobs create --help` for details.')

    return instance
Example #21
 def testMaybeWriteAppYaml_GeneratedAppInfo(self):
     """Tests that file exists message not printed if app.yaml doesn't exist."""
     rt = ext_runtime.ExternalizedRuntime.Load(self.runtime_def_dir,
                                               self.env)
     runtime_config = yaml.load(
         textwrap.dedent("""\
     runtime: python
     env: 2
     entrypoint: run_me_some_python!
     handlers:
     - url: .*
       script: request
     """))
     params = ext_runtime.Params()
     self.Touch(directory=self.temp_path,
                name='exists',
                contents='my contents')
     configurator = rt.Detect(self.temp_path, params)
     configurator.SetGeneratedAppInfo(runtime_config)
     configurator.MaybeWriteAppYaml()
     self.assertNotIn(
         ('print', ext_runtime.FILE_EXISTS_MESSAGE.format('app.yaml')),
         self.log)
     self.AssertFileExistsWithContents(yaml.dump(runtime_config),
                                       self.temp_path, 'app.yaml')
Example #22
  def testRunStatus(self):
    cluster_name = 'mock-cluster'
    msg = self.features_api.messages
    msg_fs = self.features_api.messages.FeatureState
    lifecycle_state = (
        msg_fs.LifecycleStateValueValuesEnum
        .ENABLED)
    details_by_membership = (msg_fs.DetailsByMembershipValue(
        additionalProperties=[msg_fs.DetailsByMembershipValue.AdditionalProperty(
            key='projects/mock-project/locations/global/memberships/mock-cluster',
            value=msg.FeatureStateDetails(
                configmanagementFeatureState=msg.ConfigManagementFeatureState(
                    clusterName=cluster_name)))]))
    feature = self.features_api._MakeFeature(
        configmanagementFeatureSpec=self.features_api.messages
        .ConfigManagementFeatureSpec(),
        featureState=self._MakeFeatureState(lifecycle_state,
                                            details_by_membership))
    self.features_api.ExpectGet(feature)

    self.RunCommand(['status'])
    out = yaml.load(self.GetOutput())
    self.assertIsNotNone(out)
    split_out = out.split()
    log.warning(split_out)
    header = ['Name', 'Status', 'Last_Synced_Token', 'Sync_Branch',
              'Last_Synced_Time']
    for i, _ in enumerate(header):
      self.assertEqual(split_out[i], header[i])
    self.assertEqual(split_out[len(header)], cluster_name)
Example #23
  def _CreateVersionResource(self, service_config, manifest, version_id, build,
                             extra_config_settings=None):
    """Constructs a Version resource for deployment.

    Args:
      service_config: ServiceYamlInfo, Service info parsed from a service yaml
        file.
      manifest: Dictionary mapping source files to Google Cloud Storage
        locations.
      version_id: str, The version of the service.
      build: BuildArtifact, The build ID, image path, or build options.
      extra_config_settings: dict, client config settings to pass to the server
        as beta settings.

    Returns:
      A Version resource whose Deployment includes either a container pointing
        to a completed image, or a build pointing to an in-progress build.
    """

    parsed_yaml = service_config.parsed.ToYAML()
    config_dict = yaml.load(parsed_yaml)
    try:
      # pylint: disable=protected-access
      schema_parser = convert_yaml.GetSchemaParser(self.client._VERSION)
      json_version_resource = schema_parser.ConvertValue(config_dict)
    except ValueError as e:
      raise exceptions.ConfigError(
          '[{f}] could not be converted to the App Engine configuration '
          'format for the following reason: {msg}'.format(
              f=service_config.file, msg=e))
Example #24
def ProcessConfigOverrideFile(config_override_file, api_version):
  """Reads a JSON/YAML config_override_file and returns collection of config override object."""

  try:
    overrides = json.loads(config_override_file)
  except ValueError as e:
    try:
      overrides = yaml.load(config_override_file)
    except yaml.YAMLParseError as e:
      raise InvalidSpecFileError(
          'Error parsing config_override file: [{}]'.format(e))

  messages = GetMessages(api_version)
  message_class = messages.GameServerConfigOverride
  try:
    overrides_message = [
        encoding.DictToMessage(o, message_class) for o in overrides
    ]
  except AttributeError:
    raise InvalidSchemaError(
        'Invalid schema: unexpected game server config override(s) format.')
  except _messages.ValidationError as e:
    # Unfortunately apitools doesn't provide a way to get the path to the
    # invalid field here.
    raise InvalidSchemaError('Invalid schema: [{}]'.format(e))
  unrecognized_field_paths = _GetUnrecognizedFieldPaths(overrides_message)
  if unrecognized_field_paths:
    error_msg_lines = ['Invalid schema, the following fields are unrecognized:']
    error_msg_lines += unrecognized_field_paths
    raise InvalidSchemaError('\n'.join(error_msg_lines))

  return overrides_message
Example #25
    def FromPath(cls, resource_path):
        """Constructs a ResourceYAMLData from a standard resource_path.

    Args:
      resource_path: string, the dotted path of the resources.yaml file, e.g.
        iot.device or compute.instance.

    Returns:
      A ResourceYAMLData object.

    Raises:
      InvalidResourcePathError: invalid resource_path string.
    """
        match = re.search(_RESOURCE_PATH_PATTERN, resource_path)
        if not match:
            raise InvalidResourcePathError(
                'Invalid resource_path: [{}].'.format(resource_path))
        surface_name = match.group('surface_name')
        resource_name = match.group('resource_name')
        # Gets the directory name of the targeted YAML file.
        # Example: googlecloudsdk.command_lib.iot.
        dir_name = _RESOURCE_FILE_PREFIX + surface_name + '.'
        resource_file = pkg_resources.GetResource(dir_name,
                                                  _RESOURCE_FILE_NAME)
        # Loads the data from YAML file.
        resource_data = yaml.load(resource_file)[resource_name]
        return cls(resource_data)
Example #26
def ChangeFromYamlFile(yaml_file, api_version='v1'):
    """Returns the change contained in the given yaml file.

  Args:
    yaml_file: file, A yaml file with change.
    api_version: [str], the api version to use for creating the change object.

  Returns:
    Change, the change contained in the given yaml file.

  Raises:
    CorruptedTransactionFileError: if the record_set_dictionaries are invalid
  """
    messages = apis.GetMessagesModule('dns', api_version)
    try:
        change_dict = yaml.load(yaml_file) or {}
    except yaml.YAMLParseError:
        raise CorruptedTransactionFileError()
    if (change_dict.get('additions') is None
            or change_dict.get('deletions') is None):
        raise CorruptedTransactionFileError()
    change = messages.Change()
    change.additions = _RecordSetsFromDictionaries(messages,
                                                   change_dict['additions'])
    change.deletions = _RecordSetsFromDictionaries(messages,
                                                   change_dict['deletions'])
    return change
Example #27
  def _ParseMysqlSourceConfig(self, mysql_source_config_file, release_track):
    """Parses a mysql_source_config file into the MysqlSourceConfig message."""
    data = console_io.ReadFromFileOrStdin(
        mysql_source_config_file, binary=False)
    try:
      mysql_source_config_head_data = yaml.load(data)
    except Exception as e:
      raise ds_exceptions.ParseError('Cannot parse YAML: [{0}]'.format(e))

    mysql_source_config_data_object = mysql_source_config_head_data.get(
        'mysql_source_config')
    mysql_rdbms_data = (
        mysql_source_config_data_object
        if mysql_source_config_data_object else mysql_source_config_head_data)

    include_objects_raw = mysql_rdbms_data.get(
        util.GetRDBMSV1alpha1ToV1FieldName('allowlist', release_track), {})
    include_objects_data = util.ParseMysqlSchemasListToMysqlRdbmsMessage(
        self._messages, include_objects_raw, release_track)

    exclude_objects_raw = mysql_rdbms_data.get(
        util.GetRDBMSV1alpha1ToV1FieldName('rejectlist', release_track), {})
    exclude_objects_data = util.ParseMysqlSchemasListToMysqlRdbmsMessage(
        self._messages, exclude_objects_raw, release_track)

    mysql_source_config_msg = self._messages.MysqlSourceConfig(
        includeObjects=include_objects_data,
        excludeObjects=exclude_objects_data)
    return mysql_source_config_msg
Example #28
def ReadRequest(input_file):
  """Reads a JSON request from the specified input file.

  Args:
    input_file: An open file-like object for the input file.

  Returns:
    A json object from the input file

  Raises:
    InvalidInstancesFileError: If the input file is invalid.
  """
  try:
    request = yaml.load(input_file)
  except ValueError:
    raise errors.InvalidInstancesFileError(
        'Input instances are not in JSON format. '
        'See `gcloud ai endpoints predict --help` for details.')

  if not isinstance(request, dict):
    raise errors.InvalidInstancesFileError(
        'Input instances are not in JSON format. '
        'See `gcloud ai endpoints predict --help` for details.')

  if 'instances' not in request:
    raise errors.InvalidInstancesFileError(
        'Invalid JSON request: missing "instances" attribute')

  if not isinstance(request['instances'], list):
    raise errors.InvalidInstancesFileError(
        'Invalid JSON request: "instances" must be a list')

  return request
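A valid input file for ReadRequest is a JSON (or YAML) object with a top-level 'instances' list, e.g. (a sketch; the file name is hypothetical):

# request.json:
# {
#   "instances": [
#     {"feature_a": 1.0, "feature_b": "x"},
#     {"feature_a": 2.0, "feature_b": "y"}
#   ]
# }
with open('request.json') as input_file:
  request = ReadRequest(input_file)  # -> dict with an 'instances' list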
Example #29
def ReadYaml(message_type, stream, schema_path=None):
    """Read yaml from a stream as a message.

  Args:
    message_type: Type of message to interpret the yaml as.
    stream: Stream from which yaml should be read.
    schema_path: Path to schema used to validate yaml, relative to schemas dir.

  Returns:
    Message that was read.

  Raises:
    ParseError: if yaml could not be parsed as the given message type.
  """
    parsed_yaml = yaml.load(stream)
    if schema_path:
        # If a schema is provided, validate against it.
        try:
            _ValidateYaml(parsed_yaml, schema_path)
        except jsonschema_exceptions.ValidationError as e:
            raise exceptions.ParseError('Validation Error: [{0}]'.format(
                e.message))
    try:
        message = encoding.PyValueToMessage(message_type, parsed_yaml)
    except Exception as e:
        raise exceptions.ParseError('Cannot parse YAML: [{0}]'.format(e))
    return message
Example #30
  def _ParseGcsDestinationConfig(self, gcs_destination_config_file):
    """Parses a gcs_destination_config into the GcsDestinationConfig message."""
    data = console_io.ReadFromFileOrStdin(
        gcs_destination_config_file, binary=False)
    try:
      gcs_destination_head_config_data = yaml.load(data)
    except Exception as e:
      raise ds_exceptions.ParseError('Cannot parse YAML: [{0}]'.format(e))

    gcs_destination_config_data_object = gcs_destination_head_config_data.get(
        'gcs_destination_config')
    gcs_destination_config_data = (
        gcs_destination_config_data_object
        if gcs_destination_config_data_object
        else gcs_destination_head_config_data)

    path = gcs_destination_config_data.get('path', '')
    file_rotation_mb = gcs_destination_config_data.get('file_rotation_mb', {})
    file_rotation_interval = gcs_destination_config_data.get(
        'file_rotation_interval', {})
    gcs_dest_config_msg = self._messages.GcsDestinationConfig(
        path=path, fileRotationMb=file_rotation_mb,
        fileRotationInterval=file_rotation_interval)
    if 'avro_file_format' in gcs_destination_config_data:
      gcs_dest_config_msg.avroFileFormat = self._messages.AvroFileFormat()
    elif 'json_file_format' in gcs_destination_config_data:
      json_file_format_data = gcs_destination_config_data.get(
          'json_file_format')
      gcs_dest_config_msg.jsonFileFormat = self._messages.JsonFileFormat(
          compression=json_file_format_data.get('compression'),
          schemaFileFormat=json_file_format_data.get('schema_file_format'))
    else:
      raise ds_exceptions.ParseError(
          'Cannot parse YAML: missing file format.')
    return gcs_dest_config_msg
Example #31
 def _PopulateDefaultAuthConfig(self, configmap_obj, cluster_defaults):
     """Populates the default eventing config and adds an annotation."""
     auth_config = yaml.load(
         configmap_obj.data.get('default-auth-config', '{}'))
     auth_config['clusterDefaults'] = cluster_defaults
     configmap_obj.data['default-auth-config'] = yaml.dump(auth_config)
     configmap_obj.annotations[_CLUSTER_INITIALIZED_ANNOTATION] = 'true'
Example #32
def ProcessSchemaFromFile(schema_file):
    try:
        schema = yaml.load(schema_file)
    except yaml.YAMLParseError as e:
        raise InvalidSchemaFileError(
            'Error parsing schema file: [{}]'.format(e))
    return _SchemaToMessage(schema)
Example #33
    def testRunDescribe(self):
        config_membership = '{0}/memberships/golden-cluster'.format(
            self.features_api.parent)
        lifecycle_state = (self.features_api.messages.FeatureState.
                           LifecycleStateValueValuesEnum.ENABLED)
        multiclusteringress_feature_spec = self._MakeMultiClusterIngressFeatureSpec(
            config_membership=config_membership)
        feature = self.features_api._MakeFeature(
            multiclusteringressFeatureSpec=multiclusteringress_feature_spec,
            featureState=self._MakeFeatureState(lifecycle_state))
        self.features_api.ExpectGet(feature)

        self.RunCommand(['describe'])

        out = yaml.load(self.GetOutput())
        self.assertIsNotNone(out)
        kwargs = {
            'multiclusteringressFeatureSpec': {
                'configMembership': config_membership
            },
            'featureState': {
                'lifecycleState': lifecycle_state.name
            }
        }
        self.assertEqual(out, kwargs)
Example #34
 def TryYaml():
   try:
     return yaml.load(input_string)
   except yaml.YAMLParseError as e:
     if hasattr(e.inner_error, 'problem_mark'):
       mark = e.inner_error.problem_mark
       log.error('Service config YAML had an error at position (%s:%s)'
                 % (mark.line+1, mark.column+1))
Example #35
def _LoadData(path):
  try:
    with open(path) as config_file:
      return yaml.load(config_file)
  except yaml.error.YAMLError as err:
    raise ParseError(path, 'Problem parsing data as YAML: {}'.format(err))
  except EnvironmentError as err:
    raise ParseError(path, 'Problem loading file: {}'.format(err))
Example #36
def LoadCloudbuildConfigFromStream(stream, messages, params=None,
                                   path=None):
  """Load a cloudbuild config file into a Build message.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded
    messages: module, The messages module that has a Build type.
    params: dict, parameters to substitute into the Build spec.
    path: str or None. Optional path to be used in error messages.

  Raises:
    NotFoundException: If the file does not exist.
    ParserError: If there was a problem parsing the file.
    BadConfigException: If the config file has illegal values.

  Returns:
    Build message, The build that got decoded.
  """
  # Turn the data into a dict
  try:
    structured_data = yaml.load(stream, file_hint=path)
  except yaml.Error as e:
    raise ParserError(path, e.inner_error)
  if not isinstance(structured_data, dict):
    raise ParserError(path, 'Could not parse into a message.')

  # Transform snake_case into camelCase.
  structured_data = _SnakeToCamel(structured_data)  # type: dict

  # Then, turn the dict into a proto message.
  try:
    build = _UnpackCheckUnused(structured_data, messages.Build)
  except Exception as e:
    # Catch all exceptions here because a valid YAML can sometimes not be a
    # valid message, so we need to catch all errors in the dict to message
    # conversion.
    raise BadConfigException(path, '%s' % e)

  subst = structured_data.get('substitutions', {})
  # Validate the substitution keys in the message.
  for key in subst:
    if not _BUILTIN_SUBSTITUTION_REGEX.match(key):
      raise BadConfigException(
          path,
          'config cannot specify built-in substitutions')
  # Merge the substitutions passed in the flag.
  if params:
    subst.update(params)
  build.substitutions = cloudbuild_util.EncodeSubstitutions(subst, messages)

  # Some problems can be caught before talking to the cloudbuild service.
  if build.source:
    raise BadConfigException(path, 'config cannot specify source')
  if not build.steps:
    raise BadConfigException(path, 'config must list at least one step')

  return build
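A config that would pass the structural checks above might look like this (a sketch; hypothetical builder, image, and substitution values):

import io

config = '''
steps:
- name: gcr.io/cloud-builders/docker
  args: [build, -t, gcr.io/my-project/app, .]
substitutions:
  _ENV: prod
'''
build = LoadCloudbuildConfigFromStream(io.StringIO(config), messages)
# 'steps' is mandatory, 'source' is forbidden, and every substitutions key
# is validated against _BUILTIN_SUBSTITUTION_REGEX before encoding.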
Example #37
def UpdateMetadata(metadata, args):
  """Update konlet metadata entry using user-supplied data."""
  # precondition: metadata.key == GCE_CONTAINER_DECLARATION

  manifest = yaml.load(metadata.value)

  if args.IsSpecified('container_image'):
    manifest['spec']['containers'][0]['image'] = args.container_image

  if args.IsSpecified('container_command'):
    manifest['spec']['containers'][0]['command'] = [args.container_command]

  if args.IsSpecified('clear_container_command'):
    manifest['spec']['containers'][0].pop('command', None)

  if args.IsSpecified('container_arg'):
    manifest['spec']['containers'][0]['args'] = args.container_arg

  if args.IsSpecified('clear_container_args'):
    manifest['spec']['containers'][0].pop('args', None)

  if args.container_privileged is True:
    manifest['spec']['containers'][0]['securityContext']['privileged'] = True

  if args.container_privileged is False:
    manifest['spec']['containers'][0]['securityContext']['privileged'] = False

  _UpdateMounts(manifest, args.remove_container_mounts or [],
                args.container_mount_host_path or [],
                args.container_mount_tmpfs or [])

  _UpdateEnv(manifest,
             itertools.chain.from_iterable(args.remove_container_env or []),
             args.container_env_file, args.container_env or [])

  if args.container_stdin is True:
    manifest['spec']['containers'][0]['stdin'] = True

  if args.container_stdin is False:
    manifest['spec']['containers'][0]['stdin'] = False

  if args.container_tty is True:
    manifest['spec']['containers'][0]['tty'] = True

  if args.container_tty is False:
    manifest['spec']['containers'][0]['tty'] = False

  if args.IsSpecified('container_restart_policy'):
    manifest['spec']['restartPolicy'] = RESTART_POLICY_API[
        args.container_restart_policy]

  metadata.value = yaml.dump(manifest)
Example #38
  def LoadFromURI(cls, uri):
    """Loads a manifest from a gs:// or file:// path.

    Args:
      uri: str, A gs:// or file:// URI

    Returns:
      Manifest, the loaded manifest.
    """
    log.debug('Loading runtimes manifest from [%s]', uri)
    with _Read(uri) as f:
      data = yaml.load(f, file_hint=uri)
    return cls(uri, data)
Example #39
def GetFileAsMessage(path, message, client):
  """Reads a YAML or JSON object of type message from path (local or GCS).

  Args:
    path: A local or GCS path to an object specification in YAML or JSON format.
    message: The message type to be parsed from the file.
    client: The storage_v1 client to use.

  Returns:
    Object of type message, if successful.
  Raises:
    files.Error, genomics_exceptions.GenomicsInputFileError
  """
  if path.startswith(GCS_PREFIX):
    # Download remote file to a local temp file
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()

    bucket, obj = _SplitBucketAndObject(path)
    storage_messages = core_apis.GetMessagesModule('storage', 'v1')
    get_request = storage_messages.StorageObjectsGetRequest(
        bucket=bucket, object=obj)
    try:
      download = transfer.Download.FromFile(tf.name, overwrite=True)
      client.objects.Get(get_request, download=download)
      del download  # Explicitly close the stream so the results are there
    except apitools_exceptions.HttpError as e:
      raise genomics_exceptions.GenomicsInputFileError(
          'Unable to read remote file [{0}] due to [{1}]'.format(path, str(e)))
    path = tf.name

  # Read the file.
  in_text = files.GetFileContents(path)
  if not in_text:
    raise genomics_exceptions.GenomicsInputFileError(
        'Empty file [{0}]'.format(path))

  # Parse it, first trying YAML then JSON.
  try:
    result = encoding.PyValueToMessage(message, yaml.load(in_text))
  except (ValueError, AttributeError, yaml.YAMLParseError):
    try:
      result = encoding.JsonToMessage(message, in_text)
    except (ValueError, DecodeError) as e:
      # ValueError is raised when JSON is badly formatted
      # DecodeError is raised when a tag is badly formatted (not Base64)
      raise genomics_exceptions.GenomicsInputFileError(
          'Pipeline file [{0}] is not properly formatted YAML or JSON '
          'due to [{1}]'.format(path, str(e)))
  return result
Example #40
def ConstructUpdateMaskFromPolicy(policy_file_path):
  """Construct a FieldMask based on input policy.

  Args:
    policy_file_path: Path to the JSON or YAML IAM policy file.
  Returns:
    a FieldMask containing policy fields to be modified, based on which fields
    are present in the input file.
  """
  policy_file = files.GetFileContents(policy_file_path)
  # Since json is a subset of yaml, parse file as yaml.
  policy = yaml.load(policy_file)

  # The IAM update mask should only contain top level fields. Sort the fields
  # for testing purposes.
  return ','.join(sorted(policy.keys()))
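For example (a sketch; hypothetical file contents), a policy file whose only top-level keys are 'etag' and 'bindings' yields the mask 'bindings,etag':

# policy.yaml:
#   etag: BwWKmjvelug=
#   bindings:
#   - role: roles/owner
#     members:
#     - user:alice@example.com
mask = ConstructUpdateMaskFromPolicy('policy.yaml')
# mask == 'bindings,etag'  (top-level keys only, sorted)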
Example #41
def ReadEnvYaml(output_dir):
  """Reads and returns the environment values in output_dir/env.yaml file.

  Args:
    output_dir: str, Path of directory containing the env.yaml to be read.

  Returns:
    env: {str: str}
  """
  env_file_path = os.path.join(output_dir, 'env.yaml')
  try:
    with open(env_file_path, 'r') as env_file:
      return yaml.load(env_file)
  except IOError as err:
    if err.errno == errno.ENOENT:
      raise NoEnvYamlError(output_dir)
    else:
      raise err
Example #42
def FlattenLayoutOutputs(manifest_layout):
  """Takes the layout from a manifest and returns the flattened outputs.

  List output 'foo: [A,B]' becomes 'foo[0]: A, foo[1]: B'
  Dict output 'bar: {a:1, b:2}' becomes 'bar[a]: 1, bar[b]: 2'
  Lists and Dicts whose values are themselves lists or dicts are not expanded.

  Args:
    manifest_layout: The 'layout' field from the manifest.

  Returns:
    A list of {'name': X, 'finalValue': Y} dicts built out of the 'outputs'
    section of the layout.
  """

  layout = yaml.load(manifest_layout)

  if not isinstance(layout, dict) or 'outputs' not in layout:
    return []  # Empty list

  outputs = []

  basic_outputs = layout['outputs']
  for basic_output in basic_outputs:
    if 'finalValue' not in basic_output or 'name' not in basic_output:
      continue   # No value to process
    name = basic_output['name']
    value = basic_output['finalValue']
    if isinstance(value, list):
      for pos in range(len(value)):
        final_name = '%s[%d]' % (name, pos)
        outputs.append(_BuildOutput(final_name, value[pos]))
    elif isinstance(value, dict):
      for key in value:
        final_name = '%s[%s]' % (name, key)
        outputs.append(_BuildOutput(final_name, value[key]))
    else:
      outputs.append(_BuildOutput(name, value))

  return outputs
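A worked example of the flattening described in the docstring (a sketch; _BuildOutput is assumed to return a {'name': ..., 'finalValue': ...} dict):

manifest_layout = '''
outputs:
- name: foo
  finalValue: [A, B]
- name: bar
  finalValue: {a: 1, b: 2}
'''
# FlattenLayoutOutputs(manifest_layout) ->
#   [{'name': 'foo[0]', 'finalValue': 'A'},
#    {'name': 'foo[1]', 'finalValue': 'B'},
#    {'name': 'bar[a]', 'finalValue': 1},
#    {'name': 'bar[b]', 'finalValue': 2}]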
Example #43
def CreateLogMetric(metric_name, description=None, log_filter=None, data=None):
  """Returns a LogMetric message based on a data stream or a description/filter.

  Args:
    metric_name: str, the name of the metric.
    description: str, a description.
    log_filter: str, the filter for the metric's filter field.
    data: str, a stream of data read from a config file.

  Returns:
    LogMetric, the message representing the new metric.
  """
  messages = GetMessages()
  if data:
    contents = yaml.load(data)
    metric_msg = encoding.DictToMessage(contents,
                                        messages.LogMetric)
    metric_msg.name = metric_name
  else:
    metric_msg = messages.LogMetric(name=metric_name,
                                    description=description,
                                    filter=log_filter)
  return metric_msg
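A usage sketch for the config-data path (hypothetical metric name and contents; data is the raw text of a config file):

data = '''
description: Count of 404 responses
filter: resource.type="http_load_balancer" AND status=404
'''
metric = CreateLogMetric('http-404s', data=data)
# metric.name == 'http-404s'; description and filter are decoded from the
# YAML into the LogMetric message.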
Example #44
def UpdateLogMetric(metric, description=None, log_filter=None, data=None):
  """Updates a LogMetric message given description, filter, and/or data.

  Args:
    metric: LogMetric, the original metric.
    description: str, updated description if any.
    log_filter: str, updated filter for the metric's filter field if any.
    data: str, a stream of data read from a config file if any.

  Returns:
    LogMetric, the message representing the updated metric.
  """
  messages = GetMessages()
  if description:
    metric.description = description
  if log_filter:
    metric.filter = log_filter
  if data:
    # Update the top-level fields only.
    update_data = yaml.load(data)
    metric_diff = encoding.DictToMessage(update_data, messages.LogMetric)
    for field_name in update_data:
      setattr(metric, field_name, getattr(metric_diff, field_name))
  return metric
Example #45
def DeserializeValue(value, fmt):
  """Parses the given JSON or YAML value."""
  if fmt == 'json':
    return json.loads(value)
  else:
    return yaml.load(value)
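Both branches accept the same logical value (a sketch):

DeserializeValue('{"a": 1}', 'json')  # -> {'a': 1}
DeserializeValue('a: 1', 'yaml')      # -> {'a': 1}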
Example #46
def ParseAsYaml(value):
  return yaml.load(value)