Code Example #1
    def testSecurityPolicy(self):
        security_policy_name = _UniqueName('my-policy')

        with self._SecurityPolicy(security_policy_name):
            # Export the security policy, modify it locally, then import it back.
            self.result_file_path = os.path.join(self.temp_path, 'exported')
            self.Run('compute security-policies export {0}'
                     ' --file-name "{1}" --file-format yaml'.format(
                         security_policy_name, self.result_file_path))
            security_policy = yaml.load_path(self.result_file_path)

            self.assertEqual('', security_policy['description'])
            self.assertEqual(1, len(security_policy['rules']))
            default_rule = security_policy['rules'][0]
            self.assertEqual('default rule', default_rule['description'])
            self.assertEqual(2147483647, default_rule['priority'])
            self.assertEqual('SRC_IPS_V1',
                             default_rule['match']['versionedExpr'])
            self.assertEqual('*',
                             default_rule['match']['config']['srcIpRanges'][0])
            self.assertEqual('allow', default_rule['action'])
            self.assertEqual(False, default_rule['preview'])

            security_policy['description'] = 'new description'
            security_policy['rules'] = []

            with open(self.result_file_path, 'w') as export_file:
                security_policies_utils.WriteToFile(export_file,
                                                    security_policy, 'json')

            self.Run('compute security-policies import {0}'
                     ' --file-name "{1}" --file-format yaml'.format(
                         security_policy_name, self.result_file_path))
            self.Run('compute security-policies export {0}'
                     ' --file-name "{1}" --file-format json'.format(
                         security_policy_name, self.result_file_path))
            security_policy = yaml.load_path(self.result_file_path)

            self.assertEqual('new description', security_policy['description'])
            self.assertEqual(1, len(security_policy['rules']))
            default_rule = security_policy['rules'][0]
            self.assertEqual('default rule', default_rule['description'])
            self.assertEqual(2147483647, default_rule['priority'])
            self.assertEqual('SRC_IPS_V1',
                             default_rule['match']['versionedExpr'])
            self.assertEqual('*',
                             default_rule['match']['config']['srcIpRanges'][0])
            self.assertEqual('allow', default_rule['action'])
            self.assertEqual(False, default_rule['preview'])
Code Example #2
File: levels.py  Project: barber223/AudioApp
def ParseBasicLevelConditions(path):
    """Parse a YAML representation of basic level conditions..

  Args:
    path: str, path to file containing basic level conditions

  Returns:
    list of Condition objects.

  Raises:
    ParseError: if the file could not be read into the proper object
  """

    data = yaml.load_path(path)
    if not data:
        raise ParseError(path, 'File is empty')

    messages = util.GetMessages()
    message_class = messages.Condition
    try:
        conditions = [encoding.DictToMessage(c, message_class) for c in data]
    except Exception as err:
        raise InvalidFormatError(path, str(err), message_class)

    _ValidateAllFieldsRecognized(path, conditions)
    return conditions
Code Example #3
File: levels.py  Project: gyaresu/dotfiles
def ParseBasicLevelConditions(path):
  """Parse a YAML representation of basic level conditions..

  Args:
    path: str, path to file containing basic level conditions

  Returns:
    list of Condition objects.

  Raises:
    ParseError: if the file could not be read into the proper object
  """

  data = yaml.load_path(path)
  if not data:
    raise ParseError(path, 'File is empty')

  messages = util.GetMessages()
  message_class = messages.Condition
  try:
    conditions = [encoding.DictToMessage(c, message_class) for c in data]
  except Exception as err:
    raise InvalidFormatError(path, str(err), message_class)

  _ValidateAllFieldsRecognized(path, conditions)
  return conditions
Code Example #4
def ParseYamlToRole(file_path, role_message_type):
    """Construct an IAM Role protorpc.Message from a Yaml formatted file.

  Args:
    file_path: Path to the Yaml IAM Role file.
    role_message_type: Role message type to convert Yaml to.
  Returns:
    a protorpc.Message of type role_message_type filled in from the Yaml
    role file.
  Raises:
    BadFileException if the Yaml file is malformed or does not exist.
  """
    role_to_parse = yaml.load_path(file_path)
    if 'stage' in role_to_parse:
        role_to_parse['stage'] = role_to_parse['stage'].upper()
    try:
        role = encoding.PyValueToMessage(role_message_type, role_to_parse)
    except (AttributeError) as e:
        # Raised when the YAML file is not a properly formatted YAML role file.
        raise gcloud_exceptions.BadFileException(
            'Role file {0} is not a properly formatted YAML role file. {1}'.
            format(file_path, str(e)))
    except (apitools_messages.DecodeError, binascii.Error) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of role file {0} is not properly formatted. {1}'.format(
                file_path, str(e)))
    return role
Code Example #5
def ParseServicePerimetersBase(path, version=None):
    """Parse a YAML representation of a list of Service Perimeters.

  Args:
    path: str, path to file containing service perimeters
    version: str, api version of ACM to use for proto messages

  Returns:
    list of Service Perimeters objects.

  Raises:
    ParseError: if the file could not be read into the proper object
  """

    data = yaml.load_path(path)
    if not data:
        raise ParseError(path, 'File is empty')

    messages = util.GetMessages(version=version)
    message_class = messages.ServicePerimeter
    try:
        conditions = [encoding.DictToMessage(c, message_class) for c in data]
    except Exception as err:
        raise InvalidFormatError(path, six.text_type(err), message_class)

    _ValidateAllFieldsRecognized(path, conditions)
    return conditions
Code Example #6
File: util.py  Project: Guliux10/bchacks_deepbreath
def ParseWhoisContact(path):
    if path is None:
        return None
    raw_contact = yaml.load_path(path)
    messages = registrations.GetMessagesModule()
    parsed_contact = messages.WhoisContact(**raw_contact)
    return parsed_contact
Code Example #7
def GetCredentialsConfigFromFile(filename):
    """Returns the JSON content of a credentials config file.

  This function is useful when the content of a file need to be inspected first
  before determining how to handle it (how to initialize the underlying
  credentials). Only UTF-8 JSON files are supported.

  Args:
    filename (str): The filepath to the ADC file representing credentials.

  Returns:
    Optional(Mapping): The JSON content.

  Raises:
    BadCredentialFileException: If JSON parsing of the file fails.
  """

    try:
        # YAML is a superset of JSON.
        content = yaml.load_path(filename)
    except UnicodeDecodeError as e:
        raise BadCredentialFileException(
            'File {0} is not utf-8 encoded: {1}'.format(filename, e))
    except yaml.YAMLParseError as e:
        raise BadCredentialFileException(
            'Could not read json file {0}: {1}'.format(filename, e))

    # Require the JSON content to be an object.
    # Credentials and configs are always objects.
    if not isinstance(content, dict):
        raise BadCredentialFileException(
            'Could not read json file {0}'.format(filename))
    return content
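
The comment above relies on YAML being a superset of JSON, which is why a JSON credentials file can be read with the YAML loader. A stand-alone illustration of that idea using plain PyYAML (this is not the SDK's yaml module, and the keys below are only illustrative):

import yaml as pyyaml  # PyYAML, used only for this stand-alone illustration

json_text = '{"type": "service_account", "project_id": "my-project"}'
content = pyyaml.safe_load(json_text)  # JSON parses fine as YAML
assert isinstance(content, dict)       # the same shape check made above
assert content['type'] == 'service_account'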
Code Example #8
File: tags_v1.py  Project: saranraju90/multik8s
    def _ProcessTagFromFile(self, tag_template_ref, tag_file):
        """Processes a tag file into the request."""
        try:
            tag = yaml.load_path(tag_file)
            if not isinstance(tag, dict):
                raise InvalidTagFileError(
                    'Error parsing tag file: [invalid format]')
        except yaml.YAMLParseError as e:
            raise InvalidTagFileError('Error parsing tag file: [{}]'.format(e))

        tag_template = self.template_service.Get(
            self.messages.DatacatalogProjectsLocationsTagTemplatesGetRequest(
                name=tag_template_ref.RelativeName(), ))
        field_to_field_type = {}
        for additional_property in tag_template.fields.additionalProperties:
            message_type = additional_property.value.type
            field_to_field_type[additional_property.key] = (
                self._GetFieldType(message_type))

        additional_properties = []
        for field_id, field_value in six.iteritems(tag):
            if field_id not in field_to_field_type:
                raise InvalidTagError(
                    'Error parsing tag file: [{}] is not a valid field.'.
                    format(field_id))
            additional_properties.append(
                self.messages.GoogleCloudDatacatalogV1Tag.FieldsValue.
                AdditionalProperty(
                    key=field_id,
                    value=self._MakeTagField(field_to_field_type[field_id],
                                             field_value),
                ))

        return self.messages.GoogleCloudDatacatalogV1Tag.FieldsValue(
            additionalProperties=additional_properties, )
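
From the parsing logic above, a tag file is expected to be a flat YAML mapping of tag-template field IDs to values, each of which is then converted with _MakeTagField. A hypothetical file and the corresponding shape check (the field names are invented for illustration and would have to exist on the tag template):

import yaml as pyyaml  # PyYAML, for a stand-alone illustration only

# Hypothetical tag file body: a flat mapping of field IDs to values.
tag_file_contents = """
data_owner: analytics-team
has_pii: true
row_count: 12345
"""
tag = pyyaml.safe_load(tag_file_contents)
assert isinstance(tag, dict)  # the same check _ProcessTagFromFile performs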
Code Example #9
File: levels.py  Project: PinTrees/novelhub
    def VersionedParseCustomLevel(path):
        """Parse a YAML representation of custom level conditions.

    Args:
      path: str, path to file containing custom level expression

    Returns:
      Expr message containing the CEL expression.

    Raises:
      ParseError: if the file could not be read into the proper object
    """

        data = yaml.load_path(path)
        if not data:
            raise ParseError(path, 'File is empty')

        messages = util.GetMessages(version=api_version)
        message_class = messages.Expr
        try:
            expr = encoding.DictToMessage(data, message_class)
        except Exception as err:
            raise InvalidFormatError(path, six.text_type(err), message_class)

        _ValidateAllCustomFieldsRecognized(path, expr)
        return expr
Code Example #10
def ParseMessageFromYamlFile(path, message_type, error_message):
  """Parse a Yaml file.

  Args:
    path: Yaml file path. If path is None returns None.
    message_type: Message type to parse YAML into.
    error_message: Error message to print in case of parsing error.

  Returns:
    parsed message of type message_type.
  """
  if path is None:
    return None
  raw_message = yaml.load_path(path)
  try:
    parsed_message = encoding.PyValueToMessage(message_type, raw_message)
  except Exception as e:
    # This error may be slightly different in Py2 and Py3.
    raise exceptions.Error('{}: {}'.format(error_message, e))

  unknown_fields = []
  for message in encoding.UnrecognizedFieldIter(parsed_message):
    outer_message = ''.join([edge.field + '.' for edge in message[0]])
    unknown_fields += [outer_message + field for field in message[1]]
  unknown_fields.sort()
  if unknown_fields:
    raise exceptions.Error(
        ('{}.\nProblematic fields: \'{}\'').format(error_message,
                                                   ', '.join(unknown_fields)))

  return parsed_message
Code Example #11
File: levels.py  Project: PinTrees/novelhub
    def VersionedParseAccessLevels(path):
        """Parse a YAML representation of a list of Access Levels with basic/custom level conditions.

    Args:
      path: str, path to file containing basic/custom access levels

    Returns:
      list of Access Level objects.

    Raises:
      ParseError: if the file could not be read into the proper object
    """

        data = yaml.load_path(path)
        if not data:
            raise ParseError(path, 'File is empty')

        messages = util.GetMessages(version=api_version)
        message_class = messages.AccessLevel
        try:
            levels = [encoding.DictToMessage(c, message_class) for c in data]
        except Exception as err:
            raise InvalidFormatError(path, six.text_type(err), message_class)

        _ValidateAllLevelFieldsRecognized(path, levels)
        return levels
Code Example #12
File: iam_util.py  Project: gyaresu/dotfiles
def ParseYamlorJsonPolicyFile(policy_file_path, policy_message_type):
  """Create an IAM Policy protorpc.Message from a YAML or JSON formatted file.

  Args:
    policy_file_path: Path to the YAML or JSON IAM policy file.
    policy_message_type: Policy message type to convert YAML to.
  Returns:
    a protorpc.Message of type policy_message_type filled in from the input
    policy file.
  Raises:
    BadFileException if the YAML or JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
  policy_to_parse = yaml.load_path(policy_file_path)
  try:
    policy = encoding.PyValueToMessage(policy_message_type, policy_to_parse)
  except (AttributeError) as e:
    # Raised when the input file is not a properly formatted YAML policy file.
    raise gcloud_exceptions.BadFileException(
        'Policy file [{0}] is not a properly formatted YAML or JSON '
        'policy file. {1}'
        .format(policy_file_path, str(e)))
  except (apitools_messages.DecodeError) as e:
    # DecodeError is raised when etag is badly formatted (not proper Base64)
    raise IamEtagReadError(
        'The etag of policy file [{0}] is not properly formatted. {1}'
        .format(policy_file_path, str(e)))
  return policy
Code Example #13
File: iam_util.py  Project: gyaresu/dotfiles
def ParseYamlToRole(file_path, role_message_type):
  """Construct an IAM Role protorpc.Message from a Yaml formatted file.

  Args:
    file_path: Path to the Yaml IAM Role file.
    role_message_type: Role message type to convert Yaml to.
  Returns:
    a protorpc.Message of type role_message_type filled in from the Yaml
    role file.
  Raises:
    BadFileException if the Yaml file is malformed or does not exist.
  """
  role_to_parse = yaml.load_path(file_path)
  if 'stage' in role_to_parse:
    role_to_parse['stage'] = role_to_parse['stage'].upper()
  try:
    role = encoding.PyValueToMessage(role_message_type, role_to_parse)
  except (AttributeError) as e:
    # Raised when the YAML file is not a properly formatted YAML role file.
    raise gcloud_exceptions.BadFileException(
        'Role file {0} is not a properly formatted YAML role file. {1}'
        .format(file_path, str(e)))
  except (apitools_messages.DecodeError) as e:
    # DecodeError is raised when etag is badly formatted (not proper Base64)
    raise IamEtagReadError(
        'The etag of role file {0} is not properly formatted. {1}'
        .format(file_path, str(e)))
  return role
Code Example #14
def _SetImagePush(skaffold_file, shared_docker):
    """Set build.local.push value in skaffold file.

  Args:
    skaffold_file: Skaffold file handle.
    shared_docker: Boolean that is true if docker instance is shared between the
      kubernetes cluster and local docker builder.

  Yields:
    Path of skaffold file with build.local.push value set to the proper value.
  """
    # TODO(b/149935260): This function can be removed when
    # https://github.com/GoogleContainerTools/skaffold/issues/3668 is resolved.
    if not shared_docker:
        # If docker is not shared, use the default value (false). There is no need
        # to rewrite the skaffold file.
        yield skaffold_file
    else:
        skaffold_yaml = yaml.load_path(skaffold_file.name)
        local_block = yaml_helper.GetOrCreate(skaffold_yaml,
                                              ('build', 'local'))
        local_block['push'] = False
        with cross_platform_temp_file.NamedTempFile(
                yaml.dump(skaffold_yaml)) as patched_skaffold_file:
            yield patched_skaffold_file
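
yaml_helper.GetOrCreate is not shown in this excerpt; presumably it walks, and creates if necessary, the nested 'build' and 'local' mappings so the 'push' key can be set even on a minimal skaffold.yaml. A stand-alone sketch of that behaviour with plain dicts, under that assumption (values illustrative):

skaffold_yaml = {'apiVersion': 'skaffold/v2beta4', 'kind': 'Config'}
local_block = skaffold_yaml.setdefault('build', {}).setdefault('local', {})
local_block['push'] = False
assert skaffold_yaml == {'apiVersion': 'skaffold/v2beta4', 'kind': 'Config',
                         'build': {'local': {'push': False}}}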
Code Example #15
File: local.py  Project: twistedpair/google-cloud-sdk
    def WithServiceYaml(self, yaml_path):
        """Overrides settings with service.yaml and returns a new Settings object."""
        yaml_dict = yaml.load_path(yaml_path)
        message = messages_util.DictToMessageWithErrorCheck(
            yaml_dict, RUN_MESSAGES_MODULE.Service)
        knative_service = k8s_service.Service(message, RUN_MESSAGES_MODULE)

        replacements = {}
        # Planned attributes in
        # http://doc/1ah6LB9we-FSEhcBZ7_4XQlnOPClTyyQW_O3Q5WNUuJc#bookmark=id.j3st2l8a3s19
        try:
            [container] = knative_service.spec.template.spec.containers
        except ValueError:
            raise exceptions.Error(
                'knative Service must have exactly one container.')

        for var in container.env:
            replacements.setdefault('env_vars', {})[var.name] = var.value

        service_account_name = knative_service.spec.template.spec.serviceAccountName
        if service_account_name:
            replacements['credential'] = ServiceAccountSetting(
                name=service_account_name)

        return self.replace(**replacements)
Code Example #16
def GetResourceAndUpdateFieldsFromFile(file_path, resource_message_type):
  """Returns the resource message and update fields in file."""
  try:
    resource_to_parse = yaml.load_path(file_path)
  except yaml.YAMLParseError as e:
    raise exceptions.BadFileException(
        'Policy config file [{0}] cannot be parsed. {1}'.format(
            file_path, six.text_type(e)))
  except yaml.FileLoadError as e:
    raise exceptions.BadFileException(
        'Policy config file [{0}] cannot be opened or read. {1}'.format(
            file_path, six.text_type(e)))

  if not isinstance(resource_to_parse, dict):
    raise exceptions.BadFileException(
        'Policy config file [{0}] is not a properly formatted YAML or JSON '
        'file.'.format(file_path))

  update_fields = list(resource_to_parse.keys())

  try:
    resource = encoding.PyValueToMessage(resource_message_type,
                                         resource_to_parse)
  except (AttributeError) as e:
    raise exceptions.BadFileException(
        'Policy config file [{0}] is not a properly formatted YAML or JSON '
        'file. {1}'.format(file_path, six.text_type(e)))

  return (resource, update_fields)
Code Example #17
File: iam_util.py  Project: akaitatitatisann/backup
def ParseYamlorJsonPolicyFile(policy_file_path, policy_message_type):
    """Create an IAM Policy protorpc.Message from a YAML or JSON formatted file.

  Args:
    policy_file_path: Path to the YAML or JSON IAM policy file.
    policy_message_type: Policy message type to convert YAML to.
  Returns:
    a protorpc.Message of type policy_message_type filled in from the input
    policy file.
  Raises:
    BadFileException if the YAML or JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
    policy_to_parse = yaml.load_path(policy_file_path)
    try:
        policy = encoding.PyValueToMessage(policy_message_type,
                                           policy_to_parse)
    except (AttributeError) as e:
        # Raised when the input file is not a properly formatted YAML policy file.
        raise gcloud_exceptions.BadFileException(
            'Policy file [{0}] is not a properly formatted YAML or JSON '
            'policy file. {1}'.format(policy_file_path, str(e)))
    except (apitools_messages.DecodeError) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of policy file [{0}] is not properly formatted. {1}'.
            format(policy_file_path, str(e)))
    return policy
Code Example #18
def _LoadTestConfig():
    """Loads test config based on environment variable setting."""
    env_config_file = encoding.GetEncodedValue(os.environ,
                                               'CLOUD_SDK_TEST_CONFIG')
    if not env_config_file:
        env_config_file = cli_test_base.CliTestBase.Resource(
            'tests', 'lib', 'e2e', 'integration_test_config.yaml')
        if not os.path.isfile(env_config_file):
            return None
    elif not os.path.isfile(env_config_file):
        if os.path.isabs(env_config_file):
            resource_config_file = None
        else:
            # Relative path that does not exist relative to the current dir.
            # Check relative to the project root.
            resource_config_file = cli_test_base.CliTestBase.Resource(
                env_config_file)
            if os.path.isfile(resource_config_file):
                env_config_file = resource_config_file
            else:
                resource_config_file = None
        if resource_config_file is None:
            raise ValueError(
                'CLOUD_SDK_TEST_CONFIG env var set to non-existent '
                'file {}'.format(env_config_file))

    return yaml.load_path(env_config_file)
Code Example #19
def _LoadData(path):
    try:
        return yaml.load_path(path)
    except yaml.FileLoadError as err:
        raise ParseError(path, 'Problem loading file: {}'.format(err))
    except yaml.YAMLParseError as err:
        raise ParseError(path, 'Problem parsing data as YAML: {}'.format(err))
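
For readers unfamiliar with the SDK helper, yaml.load_path amounts roughly to 'open the file and parse it as YAML', with failures wrapped in the two exception types _LoadData catches. A minimal stand-in sketch using plain PyYAML (an approximation, not the SDK implementation; load_path_sketch is a made-up name):

import io
import yaml as pyyaml  # PyYAML, not googlecloudsdk.core.yaml


def load_path_sketch(path):
    # Opening the file is where the SDK raises yaml.FileLoadError; parsing is
    # where it raises yaml.YAMLParseError. _LoadData above maps both to
    # ParseError.
    with io.open(path, 'r', encoding='utf-8') as f:
        return pyyaml.safe_load(f.read())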
Code Example #20
File: jobs_util.py  Project: bopopescu/GCP-speedtest
    def FromArgs(cls, args):
        """Build TrainingCustomInputServerConfig from argparse.Namespace."""
        tier = args.scale_tier

        if not tier:
            if args.config:
                data = yaml.load_path(args.config)
                tier = data.get('trainingInput', {}).get('scaleTier', None)

        parsed_tier = ScaleTierFlagMap().GetEnumForChoice(tier)
        return cls(
            scale_tier=parsed_tier,
            runtime_version=args.runtime_version,
            master_machine_type=args.master_machine_type,
            master_image_uri=args.master_image_uri,
            master_accelerator_type=(args.master_accelerator.get('type')
                                     if args.master_accelerator else None),
            master_accelerator_count=(args.master_accelerator.get('count')
                                      if args.master_accelerator else None),
            parameter_machine_type=args.parameter_server_machine_type,
            parameter_machine_count=args.parameter_server_count,
            parameter_image_uri=args.parameter_server_image_uri,
            parameter_accelerator_type=args.parameter_server_accelerator.get(
                'type') if args.parameter_server_accelerator else None,
            parameter_accelerator_count=args.parameter_server_accelerator.get(
                'count') if args.parameter_server_accelerator else None,
            worker_machine_type=args.worker_machine_type,
            worker_machine_count=args.worker_count,
            worker_image_uri=args.worker_image_uri,
            work_accelerator_type=(args.worker_accelerator.get('type')
                                   if args.worker_accelerator else None),
            work_accelerator_count=(args.worker_accelerator.get('count')
                                    if args.worker_accelerator else None))
Code Example #21
def ParseYamlOrJsonPolicyFile(policy_file_path, policy_message_type):
    """Create an IAM Policy protorpc.Message from a YAML or JSON formatted file.

  Returns the parsed policy object and FieldMask derived from input dict.
  Args:
    policy_file_path: Path to the YAML or JSON IAM policy file.
    policy_message_type: Policy message type to convert YAML to.
  Returns:
    a tuple of (policy, updateMask) where policy is a protorpc.Message of type
    policy_message_type filled in from the JSON or YAML policy file and
    updateMask is a FieldMask containing policy fields to be modified, based on
    which fields are present in the input file.
  Raises:
    BadFileException if the YAML or JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
    policy_to_parse = yaml.load_path(policy_file_path)
    try:
        policy = encoding.PyValueToMessage(policy_message_type,
                                           policy_to_parse)
        update_mask = ','.join(sorted(policy_to_parse.keys()))
    except (AttributeError) as e:
        # Raised when the input file is not a properly formatted YAML policy file.
        raise gcloud_exceptions.BadFileException(
            'Policy file [{0}] is not a properly formatted YAML or JSON '
            'policy file. {1}'.format(policy_file_path, str(e)))
    except (apitools_messages.DecodeError, binascii.Error) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of policy file [{0}] is not properly formatted. {1}'.
            format(policy_file_path, str(e)))
    return (policy, update_mask)
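
The updateMask returned above is derived purely from which top-level fields appear in the input file. A small stand-alone illustration of that derivation (the policy content is illustrative):

policy_to_parse = {'bindings': [], 'etag': 'BwUjMhCsNvY=', 'version': 3}
update_mask = ','.join(sorted(policy_to_parse.keys()))
assert update_mask == 'bindings,etag,version'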
Code Example #22
File: kubeconfig.py  Project: gyaresu/dotfiles
  def LoadFromFile(cls, filename):
    try:
      data = yaml.load_path(filename)
    except yaml.Error as error:
      raise Error('unable to load kubeconfig for {0}: {1}'.format(
          filename, error.inner_error))
    cls._Validate(data)
    return cls(data, filename)
Code Example #23
def ReadYaml(file_path, message_type):
    parsed_yaml = yaml.load_path(file_path)
    try:
        message = encoding.PyValueToMessage(message_type, parsed_yaml)
    except Exception as e:
        raise exceptions.ParseError(
            'Cannot parse YAML from file {0}: [{1}]'.format(file_path, e))
    return message
Code Example #24
    def testWriteEnvYaml(self):
        env = {'hello': 'world', 'foo': 'bar'}
        output_dir = self.CreateTempDir()
        util.WriteEnvYaml(env, output_dir)

        env_file_path = os.path.join(output_dir, 'env.yaml')
        yaml_env = yaml.load_path(env_file_path)
        self.assertEqual(env, yaml_env)
Code Example #25
    def LoadFromFile(cls, filename):
        try:
            data = yaml.load_path(filename)
        except yaml.Error as error:
            raise Error('unable to load kubeconfig for {0}: {1}'.format(
                filename, error.inner_error))
        cls._Validate(data)
        return cls(data, filename)
Code Example #26
def LoadYamlFromPath(path):
    try:
        data = yaml.load_path(path)
    except yaml.Error as e:
        raise cloudbuild_exceptions.ParserError(path, e.inner_error)
    if not yaml.dict_like(data):
        raise cloudbuild_exceptions.ParserError(
            path, "Could not parse as a dictionary.")
    return data
Code Example #27
    def Create(self,
               config_path,
               display_name,
               parent=None,
               max_trial_count=None,
               parallel_trial_count=None,
               algorithm=None,
               kms_key_name=None):
        """Creates a hyperparameter tuning job with given parameters.

    Args:
      config_path: str, the file path of the hyperparameter tuning job
        configuration.
      display_name: str, the display name of the created hyperparameter tuning
        job.
      parent: str, parent of the created hyperparameter tuning job. e.g.
        /projects/xxx/locations/xxx/
      max_trial_count: int, the desired total number of Trials. The default
        value is 1.
      parallel_trial_count: int, the desired number of Trials to run in
        parallel. The default value is 1.
      algorithm: AlgorithmValueValuesEnum, the search algorithm specified for
        the Study.
      kms_key_name: A customer-managed encryption key to use for the
        hyperparameter tuning job.

    Returns:
      Created hyperparameter tuning job.
    """
        job_spec = self.messages.GoogleCloudAiplatformV1beta1HyperparameterTuningJob(
        )

        if config_path:
            data = yaml.load_path(config_path)
            if data:
                job_spec = messages_util.DictToMessageWithErrorCheck(
                    data, self.messages.
                    GoogleCloudAiplatformV1beta1HyperparameterTuningJob)

        job_spec.maxTrialCount = max_trial_count
        job_spec.parallelTrialCount = parallel_trial_count

        if display_name:
            job_spec.displayName = display_name

        if algorithm and job_spec.studySpec:
            job_spec.studySpec.algorithm = algorithm

        if kms_key_name is not None:
            job_spec.encryptionSpec = self.messages.GoogleCloudAiplatformV1beta1EncryptionSpec(
                kmsKeyName=kms_key_name)

        return self._service.Create(
            self.messages.
            AiplatformProjectsLocationsHyperparameterTuningJobsCreateRequest(
                parent=parent,
                googleCloudAiplatformV1beta1HyperparameterTuningJob=job_spec))
Code Example #28
    def Create(self, location_ref, args):
        """Creates a model deployment monitoring job."""
        endpoint_ref = _ParseEndpoint(args.endpoint, location_ref)
        job_spec = self.messages.GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob(
        )
        if args.monitoring_config_from_file:
            data = yaml.load_path(args.monitoring_config_from_file)
            if data:
                job_spec = messages_util.DictToMessageWithErrorCheck(
                    data, self.messages.
                    GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob)
        else:
            job_spec.modelDeploymentMonitoringObjectiveConfigs = self._ConstructObjectiveConfigForCreate(
                location_ref, endpoint_ref.RelativeName(),
                args.feature_thresholds, args.dataset, args.bigquery_uri,
                args.data_format, args.gcs_uris, args.target_field,
                args.training_sampling_rate)
        job_spec.endpoint = endpoint_ref.RelativeName()
        job_spec.displayName = args.display_name

        job_spec.modelMonitoringAlertConfig = self.messages.GoogleCloudAiplatformV1beta1ModelMonitoringAlertConfig(
            emailAlertConfig=self.messages.
            GoogleCloudAiplatformV1beta1ModelMonitoringAlertConfigEmailAlertConfig(
                userEmails=args.emails))

        job_spec.loggingSamplingStrategy = self.messages.GoogleCloudAiplatformV1beta1SamplingStrategy(
            randomSampleConfig=self.messages.
            GoogleCloudAiplatformV1beta1SamplingStrategyRandomSampleConfig(
                sampleRate=args.prediction_sampling_rate))

        job_spec.modelDeploymentMonitoringScheduleConfig = self.messages.GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringScheduleConfig(
            monitorInterval='{}s'.format(
                six.text_type(3600 * int(args.monitoring_frequency))))

        if args.predict_instance_schema:
            job_spec.predictInstanceSchemaUri = args.predict_instance_schema

        if args.analysis_instance_schema:
            job_spec.analysisInstanceSchemaUri = args.analysis_instance_schema

        if args.log_ttl:
            job_spec.logTtl = '{}s'.format(
                six.text_type(86400 * int(args.log_ttl)))

        if args.sample_predict_request:
            instance_json = model_monitoring_jobs_util.ReadInstanceFromArgs(
                args.sample_predict_request)
            job_spec.samplePredictInstance = encoding.PyValueToMessage(
                extra_types.JsonValue, instance_json)

        return self._service.Create(
            self.messages.
            AiplatformProjectsLocationsModelDeploymentMonitoringJobsCreateRequest(
                parent=location_ref.RelativeName(),
                googleCloudAiplatformV1beta1ModelDeploymentMonitoringJob=
                job_spec))
Code Example #29
def GetRuntimes(args):
    """Gets a list of unique runtimes that the user is about to run.

  Args:
    args: A list of arguments (typically sys.argv).

  Returns:
    A set of runtime strings. If the runtime is python27 and a libraries
    section is populated in any of the yaml files, 'python27-libs' (a fake
    runtime id) will be part of the set, alongside the original 'python27'.

  Raises:
    MultipleAppYamlError: The supplied application configuration has duplicate
      app yamls.
  """
    runtimes = set()
    for arg in args:
        # Check all the arguments to see if they're application yaml files or
        # directories that include yaml files.
        yaml_candidate = None
        if (os.path.isfile(arg)
                and os.path.splitext(arg)[1] in _YAML_FILE_EXTENSIONS):
            yaml_candidate = arg
        elif os.path.isdir(arg):
            for extension in _YAML_FILE_EXTENSIONS:
                fullname = os.path.join(arg, 'app' + extension)
                if os.path.isfile(fullname):
                    if yaml_candidate:
                        raise MultipleAppYamlError(
                            'Directory "{0}" contains conflicting files {1}'.
                            format(arg, ' and '.join([yaml_candidate, fullname])))

                    yaml_candidate = fullname

        if yaml_candidate:
            try:
                info = yaml.load_path(yaml_candidate)
            except yaml.Error:
                continue

            # safe_load can return arbitrary objects, we need a dict.
            if not isinstance(info, dict):
                continue
            # Grab the runtime from the yaml, if it exists.
            if 'runtime' in info:
                runtime = info.get('runtime')
                if isinstance(runtime, str):
                    if runtime == 'python27' and info.get('libraries'):
                        runtimes.add('python27-libs')
                    runtimes.add(runtime)
                    if runtime in _WARNING_RUNTIMES:
                        log.warn(_WARNING_RUNTIMES[runtime])
        elif os.path.isfile(os.path.join(arg, 'WEB-INF', 'appengine-web.xml')):
            # For unstaged Java App Engine apps, which may not have any yaml files.
            runtimes.add('java')
    return runtimes
Code Example #30
File: local_config.py  Project: oarcia/cherrybit.io
  def ParseFrom(cls, filename):
    """Parse local config from filename."""
    config = yaml.load_path(filename)

    # TODO(b/78124357): Implement validation in more general way.
    for field in config.keys():
      if field not in _VALID_FIELDS_SET:
        raise ConfigError('Invalid field {} in {}'.format(field, filename))

    return LocalConfig(config.get(_SERVICE, None), config.get(_REGION, None))
Code Example #31
File: client.py  Project: PinTrees/novelhub
  def Create(self,
             parent,
             specs=None,
             config_path=None,
             display_name=None,
             python_package_uri=None):
    """Constructs a request and sends it to the endpoint to create a custom job instance."""
    if not python_package_uri:
      python_package_uri = []

    job_spec = self.messages.GoogleCloudAiplatformV1beta1CustomJobSpec()
    if config_path:
      data = yaml.load_path(config_path)
      if data:
        job_spec = encoding.DictToMessage(
            data, self.messages.GoogleCloudAiplatformV1beta1CustomJobSpec)

    worker_pool_specs = []
    for spec in specs:
      machine_type = spec.get('machine-type')
      if not spec.get('replica-count'):
        replica_count = 1
      else:
        replica_count = int(spec.get('replica-count'))
      container_image_uri = spec.get('container-image-uri')
      python_image_uri = spec.get('python-image-uri')
      python_module = spec.get('python-module')
      machine_spec = self.messages.GoogleCloudAiplatformV1beta1MachineSpec(
          machineType=machine_type)

      worker_pool_spec = self.messages.GoogleCloudAiplatformV1beta1WorkerPoolSpec(
          replicaCount=replica_count, machineSpec=machine_spec)
      if container_image_uri:
        worker_pool_spec.containerSpec = self.messages.GoogleCloudAiplatformV1beta1ContainerSpec(
            imageUri=container_image_uri)

      # TODO(b/161753810): Pass args and commands to the python package
      # and container.
      if python_package_uri or python_image_uri or python_module:
        worker_pool_spec.pythonPackageSpec = self.messages.GoogleCloudAiplatformV1beta1PythonPackageSpec(
            executorImageUri=python_image_uri,
            packageUris=python_package_uri,
            pythonModule=python_module)

      worker_pool_specs.append(worker_pool_spec)
    if worker_pool_specs:
      job_spec.workerPoolSpecs = worker_pool_specs
    validation.ValidateWorkerPoolSpec(job_spec.workerPoolSpecs)

    custom_job = self.messages.GoogleCloudAiplatformV1beta1CustomJob(
        displayName=display_name, jobSpec=job_spec)

    return self._service.Create(
        self.messages.AiplatformProjectsLocationsCustomJobsCreateRequest(
            parent=parent, googleCloudAiplatformV1beta1CustomJob=custom_job))
Code Example #32
File: osconfig_utils.py  Project: herrizd/Push_ME
def GetResourceAndUpdateFieldsFromFile(file_path, resource_message_type):
  try:
    resource_to_parse = yaml.load_path(file_path)
    update_fields = list(resource_to_parse.keys())
    resource = encoding.PyValueToMessage(resource_message_type,
                                         resource_to_parse)
    return (resource, update_fields)
  except (AttributeError) as e:
    raise exceptions.BadFileException(
        'Policy config file [{0}] is not a properly formatted YAML or JSON '
        'file. {1}'.format(file_path, str(e)))
Code Example #33
    def testEmptyDefaultKubeconfig(self):
        path = kconfig.Kubeconfig.DefaultPath()
        file_utils.MakeDir(os.path.dirname(path))
        with open(path, 'w') as fp:
            fp.write('')
        self.assertTrue(os.path.isfile(path))
        kconfig.Kubeconfig.Default()
        data = yaml.load_path(path)
        self.assertIsNotNone(data)
        with open(path, 'r') as fp:
            self.assertEqual(fp.read(), _EMPTY_KUBECONFIG)
Code Example #34
  def _ReadExplanationMetadata(self, explanation_metadata_file):
    explanation_metadata = None
    if not explanation_metadata_file:
      raise gcloud_exceptions.BadArgumentException(
          '--explanation-metadata-file',
          'Explanation metadata file must be specified.')
    # YAML is a superset of JSON, so parse the JSON file as YAML.
    data = yaml.load_path(explanation_metadata_file)
    if data:
      explanation_metadata = messages_util.DictToMessageWithErrorCheck(
          data, self.messages.GoogleCloudAiplatformV1beta1ExplanationMetadata)
    return explanation_metadata
Code Example #35
def ParseTemplate(template_file, params=None, params_from_file=None):
  """Parse and apply params into a template file.

  Args:
    template_file: The path to the file to open and parse.
    params: a dict of param-name -> param-value
    params_from_file: a dict of param-name -> param-file

  Returns:
    The parsed template dict

  Raises:
    yaml.Error: When the template file cannot be read or parsed.
    ToolException: If any params are not provided or the YAML file is invalid.
  """
  params = params or {}
  params_from_file = params_from_file or {}

  joined_params = dict(params)
  for key, file_path in params_from_file.items():
    if key in joined_params:
      raise exceptions.ToolException('Duplicate param key: ' + key)
    try:
      with open(file_path) as opened_file:
        joined_params[key] = opened_file.read()
    except IOError as e:
      raise exceptions.ToolException(
          'Could not load param key "{0}" from file "{1}": {2}'.format(
              key, file_path, e.strerror))

  template = yaml.load_path(template_file)

  if not isinstance(template, dict) or 'template' not in template:
    raise exceptions.ToolException(
        'Invalid template format.  Root must be a mapping with single '
        '"template" value')

  (template, missing_params, used_params) = ReplaceTemplateParams(
      template, joined_params)
  if missing_params:
    raise exceptions.ToolException(
        'Some parameters were present in the template but not provided on '
        'the command line: ' + ', '.join(sorted(missing_params)))
  unused_params = set(joined_params.keys()) - used_params
  if unused_params:
    raise exceptions.ToolException(
        'Some parameters were specified on the command line but not referenced '
        'in the template: ' + ', '.join(sorted(unused_params)))
  return template
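
The 'Invalid template format' check pins down the expected file shape: the root must be a mapping containing a 'template' key. A hypothetical minimal file that passes the check (parameter placeholders are deliberately omitted, since ReplaceTemplateParams is not shown in this excerpt):

import yaml as pyyaml  # PyYAML, for a stand-alone illustration only

template_text = """
template:
  resources: []
"""
template = pyyaml.safe_load(template_text)
assert isinstance(template, dict) and 'template' in template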
Code Example #36
File: type_providers.py  Project: gyaresu/dotfiles
def AddOptions(messages, options_file, type_provider):
  """Parse api options from the file and add them to type_provider.

  Args:
    messages: The API message to use.
    options_file: String path expression pointing to a type-provider options
        file.
    type_provider: A TypeProvider message on which the options will be set.
  Returns:
    The type_provider after applying changes.
  Raises:
    exceptions.ConfigError: the api options file couldn't be parsed as yaml
  """
  if not options_file:
    return type_provider

  yaml_content = yaml.load_path(options_file)
  if yaml_content:
    if 'collectionOverrides' in yaml_content:
      type_provider.collectionOverrides = []

      for collection_override_data in yaml_content['collectionOverrides']:
        collection_override = messages.CollectionOverride(
            collection=collection_override_data['collection'])

        if 'options' in collection_override_data:
          collection_override.options = _OptionsFrom(
              messages, collection_override_data['options'])

        type_provider.collectionOverrides.append(collection_override)

    if 'options' in yaml_content:
      type_provider.options = _OptionsFrom(messages, yaml_content['options'])

    if 'credential' in yaml_content:
      type_provider.credential = _CredentialFrom(
          messages, yaml_content['credential'])

  return type_provider
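
From the keys AddOptions reads, a type-provider options file is a mapping with optional 'collectionOverrides', 'options', and 'credential' sections, where each collection override names a 'collection' and may carry its own 'options'. A hypothetical skeleton (the nested fields consumed by _OptionsFrom and _CredentialFrom are not shown here, so empty mappings stand in for them):

import yaml as pyyaml  # PyYAML, for a stand-alone illustration only

options_text = """
collectionOverrides:
  - collection: projects.instances
    options: {}
options: {}
credential: {}
"""
yaml_content = pyyaml.safe_load(options_text)
assert 'collectionOverrides' in yaml_content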
Code Example #37
File: versions_api.py  Project: gyaresu/dotfiles
  def BuildVersion(self, name,
                   path=None,
                   deployment_uri=None,
                   runtime_version=None,
                   labels=None,
                   machine_type=None,
                   description=None,
                   framework=None):
    """Create a Version object.

    The object is based on an optional YAML configuration file and the
    parameters to this method; any provided method parameters override any
    provided in-file configuration.

    The file may only have the fields given in
    VersionsClientBase._ALLOWED_YAML_FIELDS specified; the only parameters
    allowed are those that can be specified on the command line.

    Args:
      name: str, the name of the version object to create.
      path: str, the path to the YAML file.
      deployment_uri: str, the deploymentUri to set for the Version
      runtime_version: str, the runtimeVersion to set for the Version
      labels: Version.LabelsValue, the labels to set for the version
      machine_type: str, the machine type to serve the model version on.
      description: str, the version description.
      framework: FrameworkValueValuesEnum, the ML framework used to train this
        version of the model.

    Returns:
      A Version object (for the corresponding API version).

    Raises:
      InvalidVersionConfigFile: If the file contains unexpected fields.
    """
    version = self.version_class()

    if path:
      try:
        data = yaml.load_path(path)
      except (yaml.Error) as err:
        raise InvalidVersionConfigFile(
            'Could not read Version configuration file [{path}]:\n\n'
            '{err}'.format(path=path, err=str(err.inner_error)))
      if data:
        version = encoding.DictToMessage(data, self.version_class)

    specified_fields = set([f.name for f in version.all_fields() if
                            getattr(version, f.name)])
    invalid_fields = (specified_fields - self._ALLOWED_YAML_FIELDS |
                      set(version.all_unrecognized_fields()))
    if invalid_fields:
      raise InvalidVersionConfigFile(
          'Invalid {noun} [{fields}] in configuration file [{path}]. '
          'Allowed fields: [{allowed}].'.format(
              noun=text.Pluralize(len(invalid_fields), 'field'),
              fields=', '.join(sorted(invalid_fields)),
              path=path,
              allowed=', '.join(sorted(self._ALLOWED_YAML_FIELDS))))

    additional_fields = {
        'name': name,
        'deploymentUri': deployment_uri,
        'runtimeVersion': runtime_version,
        'labels': labels,
        'machineType': machine_type,
        'description': description,
        'framework': framework
    }
    for field_name, value in additional_fields.items():
      if value is not None:
        setattr(version, field_name, value)

    return version
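
The validation above combines two sets: fields that are set in the file but missing from _ALLOWED_YAML_FIELDS, and fields the proto message did not recognize at all. A small stand-alone illustration of that set arithmetic (all field names are illustrative):

allowed_yaml_fields = {'description', 'deploymentUri', 'runtimeVersion'}
specified_fields = {'deploymentUri', 'autoScaling'}
unrecognized_fields = {'bogusField'}
invalid_fields = (specified_fields - allowed_yaml_fields) | unrecognized_fields
assert invalid_fields == {'autoScaling', 'bogusField'}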
Code Example #38
File: jobs.py  Project: gyaresu/dotfiles
  def BuildTrainingJob(self,
                       path=None,
                       module_name=None,
                       job_name=None,
                       trainer_uri=None,
                       region=None,
                       job_dir=None,
                       scale_tier=None,
                       user_args=None,
                       runtime_version=None,
                       labels=None):
    """Builds a Cloud ML Engine Job from a config file and/or flag values.

    Args:
        path: path to a yaml configuration file
        module_name: value to set for moduleName field (overrides yaml file)
        job_name: value to set for jobName field (overrides yaml file)
        trainer_uri: List of values to set for trainerUri field (overrides yaml
          file)
        region: compute region in which to run the job (overrides yaml file)
        job_dir: Cloud Storage working directory for the job (overrides yaml
          file)
        scale_tier: ScaleTierValueValuesEnum the scale tier for the job
          (overrides yaml file)
        user_args: [str]. A list of arguments to pass through to the job.
        (overrides yaml file)
        runtime_version: the runtime version in which to run the job (overrides
          yaml file)
        labels: Job.LabelsValue, the Cloud labels for the job
    Returns:
        A constructed Job object.
    """
    job = self.job_class()

    if path:
      data = yaml.load_path(path)
      if data:
        job = encoding.DictToMessage(data, self.job_class)

    if job_name:
      job.jobId = job_name

    if labels is not None:
      job.labels = labels

    if not job.trainingInput:
      job.trainingInput = self.training_input_class()
    additional_fields = {
        'pythonModule': module_name,
        'args': user_args,
        'packageUris': trainer_uri,
        'region': region,
        'jobDir': job_dir,
        'scaleTier': scale_tier,
        'runtimeVersion': runtime_version
    }
    for field_name, value in additional_fields.items():
      if value is not None:
        setattr(job.trainingInput, field_name, value)

    return job