Example 1
 def WriteToDisk(self):
     """Writes the Configuration object to disk as JSON.

     Serializes this configuration via ToJson() and writes it atomically
     to self.path so readers never observe a partially-written file.

     Raises:
       DockerConfigUpdateError: If serialization fails or the file cannot
         be written.
     """
     try:
         files.WriteFileAtomically(self.path, self.ToJson())
     except (TypeError, ValueError, OSError, IOError) as err:
         # Chain the original error so the underlying cause (bad JSON vs.
         # filesystem failure) remains visible in the traceback.
         raise DockerConfigUpdateError('Error writing Docker configuration '
                                       'to disk: {}'.format(str(err))) from err
Example 2
def GenerateMappingFileTemplate(api_name, message_type, skip_fields=None,
                                file_path=None, api_version=None,
                                known_mappings=None):
  """Create a stub Apitools To KRM mapping file for specified Apitools message.

  Args:
      api_name: string, The api containing the message.
      message_type: string, The message to generate mapping for.
      skip_fields: [string], A list of field paths to exclude from mapping file.
      file_path: string, path of destination file. If None, will write result to
        stdout.
      api_version: Version of the api to retrieve the message type from. If None
        will use default API version.
      known_mappings: {string: object}, Fields to pre-initialize in the mapping.

  Returns:
    The path to the created file or file contents if no path specified.
  Raises:
    InvalidDataError, if api or message are invalid.
  """
  try:
    api_obj = registry.GetAPI(api_name, api_version)
    all_messages = api_obj.GetMessagesModule()
    message = getattr(all_messages, message_type)
    mapping_object = _BuildYamlMappingTemplateFromMessage(message)

    if skip_fields:  # Remove Skipped/Unmapped message fields
      for path in skip_fields:
        file_parsers.DeleteItemInDict(mapping_object, path)

    if known_mappings:
      for path, value in six.iteritems(known_mappings):
        file_parsers.FindOrSetItemInDict(mapping_object, path, set_value=value)

    yaml.convert_to_block_text(mapping_object)
    output = yaml.dump(mapping_object, round_trip=True)

    if file_path:
      files.WriteFileAtomically(file_path, output)
      output = file_path

    return output
  except (AttributeError, registry.Error) as ae:
    # Chain the underlying error so callers can see whether the failure came
    # from an unknown API, a bad version, or a missing message attribute.
    raise InvalidDataError('Error retrieving message [{message}] from '
                           'API [{api}/{ver}] :: {error}'.format(
                               message=message_type,
                               api=api_name,
                               ver=api_version or 'default',
                               error=ae)) from ae
Example 3
def WriteDockerAuthConfig(structure):
    """Persist a full set of Docker authorization entries.

    This is public only to facilitate testing.

    Args:
      structure: The dict of authorization mappings to write to the Docker
        configuration file.
    """
    # DockerConfigInfo is deliberately not used here so that
    # UpdateDockerCredentials keeps working even when Docker is not installed.
    path, is_new_format = client_lib.GetDockerConfigPath()
    existing = client_lib.ReadConfigurationFile(path)
    if is_new_format:
        # New-format config: replace only the 'auths' section, keep the rest.
        existing['auths'] = structure
        payload = existing
    else:
        # Legacy format: the file is the auth mapping itself.
        payload = structure
    files.WriteFileAtomically(path, json.dumps(payload, indent=2))
Example 4
    def SaveToFile(self):
        """Persist the kubeconfig and reset the GKE auth plugin cache.

        Raises:
          Error: don't have the permission to open kubeconfig or plugin cache
            file.
        """
        # Flatten the in-memory lookup tables back into the serializable dict.
        for section, table in (('clusters', self.clusters),
                               ('users', self.users),
                               ('contexts', self.contexts)):
            self._data[section] = list(table.values())
        with file_utils.FileWriter(self._filename, private=True) as out:
            yaml.dump(self._data, out)

        # GKE_GCLOUD_AUTH_PLUGIN_CACHE_FILE_NAME is used by the
        # GKE_GCLOUD_AUTH_PLUGIN. Erase the cache file every time kubeconfig
        # is updated so the cache is reset; previously credentials were cached
        # in kubeconfig itself and updating it acted as the "reset".
        cache_path = os.path.join(
            os.path.dirname(self._filename),
            GKE_GCLOUD_AUTH_PLUGIN_CACHE_FILE_NAME)
        if os.path.exists(cache_path):
            file_utils.WriteFileAtomically(cache_path, '')
def GetAssetInventoryListInput(folder,
                               project,
                               org,
                               file_path=None,
                               asset_types_filter=None,
                               filter_expression=None,
                               krm_kind_filter=None):
  """Generate a AssetInventory export data set from api list call.


  Calls AssetInventory List API via shared api client (AssetListClient) and
  generates a list of exportable assets. If `asset_types_filter`,
  `gvk_kind_filter` or `filter_expression` is passed, it will filter out
  non-matching resources. If `file_path` is None list will be returned as a
  string otherwise it is written to disk at specified path.

  Args:
    folder: string, folder parent for resource export.
    project: string, project parent for resource export.
    org: string, organization parent for resource export.
    file_path: string, path to write AssetInventory export file to. If None,
      results are returned as string.
    asset_types_filter: [string], list of asset types to include in the output
      file.
    filter_expression: string, a valid gcloud filter expression. See `gcloud
      topic filter` for more details.
    krm_kind_filter: [string], list of KrmKinds corresponding to asset types to
      include in the output.

  Returns:
    string: file path where AssetInventory data has been written or raw data if
      `file_path` is None. Returns None if no results returned from API.

  Raises:
    RequiredArgumentException: If none of folder, project or org is provided.
    ResourceNotFoundException: If no resources are found or returned from
      filtering.
    ClientException: Writing file to disk.
  """
  root_asset = asset_utils.GetParentNameForExport(
      organization=org, project=project, folder=folder)
  asset_client = client_util.AssetListClient(root_asset)
  filter_func = (
      resource_filter.Compile(filter_expression.strip()).Evaluate
      if filter_expression else None)
  # Copy the caller's list: extending it below must not mutate the argument.
  asset_filter = list(asset_types_filter or [])
  if krm_kind_filter:
    kind_filters = _BuildAssetTypeFilterFromKind(krm_kind_filter)
    if not kind_filters:
      raise ResourceNotFoundException(
          'No matching resource types found for {}'.format(krm_kind_filter))
    asset_filter.extend(kind_filters)

  args = ApiClientArgs(
      snapshot_time=None,
      limit=None,
      page_size=None,
      content_type=None,
      asset_types=sorted(asset_filter),
      parent=root_asset,
      filter_func=filter_func,
      relationship_types=[])
  asset_results = asset_client.List(args, do_filter=True)
  asset_string_array = []
  for item in asset_results:  # list of apitools Asset messages.
    item_str = encoding.MessageToJson(item)
    # Normalize the JSON key to the snake_case form expected downstream.
    item_str = item_str.replace('"assetType"', '"asset_type"')
    asset_string_array.append(item_str)

  if not asset_string_array:
    if asset_types_filter:
      asset_msg = '\n With resource types in [{}].'.format(asset_types_filter)
    else:
      asset_msg = ''
    if filter_expression:
      filter_msg = '\n Matching provided filter [{}].'.format(filter_expression)
    else:
      filter_msg = ''
    raise ResourceNotFoundException(
        'No matching resources found for [{parent}] {assets} {filter}'.format(
            parent=root_asset, assets=asset_msg, filter=filter_msg))
  if not file_path:
    return '\n'.join(asset_string_array)
  else:
    try:
      files.WriteFileAtomically(file_path, '\n'.join(asset_string_array))
    except (ValueError, TypeError) as e:
      # Chain the cause instead of suppressing it with a pylint pragma.
      raise ClientException(e) from e
    return file_path