Example #1
def CreateServiceAccountKey(service_account_name):
    """Create a service account key.

  Args:
    service_account_name: Name of the service account.

  Returns:
    The contents of the generated private key file as a string.
  """
    default_credential_path = os.path.join(
        config.Paths().global_config_dir,
        _Utf8ToBase64(service_account_name) + '.json')
    credential_file_path = encoding.GetEncodedValue(os.environ,
                                                    'LOCAL_CREDENTIAL_PATH',
                                                    default_credential_path)
    if os.path.exists(credential_file_path):
        return files.ReadFileContents(credential_file_path)

    warning_msg = (
        'Creating a user-managed service account key for '
        '{service_account_name}. This service account key will be '
        'the default credential pointed to by '
        'GOOGLE_APPLICATION_CREDENTIALS in the local development '
        'environment. The user is responsible for the storage, '
        'rotation, and deletion of this key. A copy of this key will '
        'be stored at {local_key_path}.\n'
        'Only use service accounts from a test project. Do not use '
        'service accounts from a production project.').format(
            service_account_name=service_account_name,
            local_key_path=credential_file_path)
    console_io.PromptContinue(message=warning_msg,
                              prompt_string='Continue?',
                              cancel_on_no=True)

    service = apis.GetClientInstance('iam', 'v1')
    message_module = service.MESSAGES_MODULE

    create_key_request = (
        message_module.IamProjectsServiceAccountsKeysCreateRequest(
            name=service_account_name,
            createServiceAccountKeyRequest=message_module.
            CreateServiceAccountKeyRequest(
                privateKeyType=message_module.CreateServiceAccountKeyRequest.
                PrivateKeyTypeValueValuesEnum.TYPE_GOOGLE_CREDENTIALS_FILE)))
    key = service.projects_serviceAccounts_keys.Create(create_key_request)

    files.WriteFileContents(credential_file_path, key.privateKeyData)

    return six.ensure_text(key.privateKeyData)
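Note the read-through cache at the top of this example: the key file is returned if it already exists, and is minted and persisted otherwise. A minimal standalone sketch of that pattern, using only the standard library (the gcloud files helpers are, roughly, text-mode wrappers over open), might look like this; get_or_create_cached and create_fn are illustrative names, not gcloud APIs:

import os

def get_or_create_cached(path, create_fn):
    """Return the cached contents at path, minting and persisting on a miss."""
    if os.path.exists(path):
        with open(path) as f:
            return f.read()
    data = create_fn()  # e.g. a call that mints a new service account key
    with open(path, 'w') as f:
        f.write(data)
    return data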
Example #2
    def testAppengineBuilder(self):
        args = self.parser.parse_args(['--appengine'])

        with files.TemporaryDirectory() as temp_dir:
            app_yaml = 'runtime: python37'
            files.WriteFileContents(os.path.join(temp_dir, 'app.yaml'),
                                    app_yaml)

            self.assertEqual(
                local._CreateBuilder(args, temp_dir),
                local.BuildpackBuilder(
                    builder=
                    'gcr.io/gae-runtimes/buildpacks/python37/builder:argo_current',
                    trust=True,
                    devmode=False))
Example #3
    def _WriteDisk(self, on_gce):
        """Updates cache on disk."""
        gce_cache_path = config.Paths().GCECachePath()
        with self.file_lock:
            try:
                files.WriteFileContents(gce_cache_path,
                                        six.text_type(on_gce),
                                        private=True)
            except (OSError, IOError, files.Error):
                # Failed to write Google Compute Engine credential cache file.
                # This could be due to permission reasons, or because it
                # doesn't yet exist. Can't log here because the log module
                # depends (indirectly) on this one.
                pass
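The private=True flag presumably restricts the cache file to the current user. With only the standard library, creating a file that is owner-only from the moment it exists looks roughly like this (a sketch, not the gcloud implementation):

import os

def write_private(path, contents):
    # Passing mode 0o600 to os.open ensures a newly created file is never
    # readable by other users, even briefly; os.fdopen wraps the raw
    # descriptor for text I/O.
    fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, 'w') as f:
        f.write(contents)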
Example #4
def _CreateDefaultConfig(force_create):
    """Create the default configuration and migrate legacy properties.

  This will only do anything if there are no existing configurations.  If that
  is true, it will create one called default.  If there are existing legacy
  properties, it will populate the new configuration with those settings.
  The old file will be marked as deprecated.

  Args:
    force_create: bool, If False and no legacy properties exist to be migrated
      this will not physically create the default configuration.  This is ok
      as long as we are strictly reading properties from this configuration.

  Returns:
    str, The default configuration name.
  """
    paths = config.Paths()
    try:
        if not os.path.exists(paths.named_config_activator_path):
            # No configurations exist yet.  If there are legacy properties, we need
            # to create the configuration now and seed with those properties.  If no
            # legacy properties, only create the configuration if force_create is
            # True.
            legacy_properties = _GetAndDeprecateLegacyProperties(paths)
            if legacy_properties or force_create:
                file_utils.MakeDir(paths.named_config_directory)
                target_file = _FileForConfig(DEFAULT_CONFIG_NAME, paths)
                file_utils.WriteFileContents(target_file, legacy_properties)
                file_utils.WriteFileContents(paths.named_config_activator_path,
                                             DEFAULT_CONFIG_NAME)
    except file_utils.Error as e:
        raise NamedConfigFileAccessError(
            'Failed to create the default configuration. Ensure you have the '
            'correct permissions on: [{0}]'.format(
                paths.named_config_directory), e)
    return DEFAULT_CONFIG_NAME
Example #5
def GetFileChooserForDir(directory,
                         default_ignore_file=DEFAULT_IGNORE_FILE,
                         write_on_disk=True):
    """Gets the FileChooser object for the given directory.

  In order of preference:
  - Uses .gcloudignore file in the top-level directory
  - Generates Git-centric .gcloudignore file if Git files are found but no
    .gcloudignore exists. (If the directory is not writable, the file chooser
    corresponding to the ignore file that would have been generated is used).
  - If neither is found the returned FileChooser will choose all files.

  Args:
    directory: str, the path of the top-level directory to upload
    default_ignore_file: str, the ignore file to use if one is not found (and
      the directory has Git files).
    write_on_disk: bool, whether to save the generated gcloudignore to disk.

  Raises:
    BadIncludedFileError: if a file being included does not exist or is not in
      the same directory.

  Returns:
    FileChooser: the FileChooser for the directory. If there is no .gcloudignore
    file and it can't be created the returned FileChooser will choose all files.
  """
    gcloudignore_path = os.path.join(directory, IGNORE_FILE_NAME)
    try:
        return FileChooser.FromFile(gcloudignore_path)
    except BadFileError:
        pass
    if not _GitFilesExist(directory):
        return FileChooser([])

    ignore_contents = _GetIgnoreFileContents(default_ignore_file, directory)
    if write_on_disk:
        try:
            files.WriteFileContents(gcloudignore_path,
                                    ignore_contents,
                                    overwrite=False)
        except files.Error as err:
            log.info('Could not write .gcloudignore file: {}'.format(err))
        else:
            log.status.Print('Created .gcloudignore file. See `gcloud topic '
                             'gcloudignore` for details.')
    return FileChooser.FromString(ignore_contents,
                                  recurse=1,
                                  dirname=directory)
Example #6
def WriteRoutesConfig(emulators, output_file):
    """This writes out the routes information to a file.

  The routes will be written as json in the format
  {service1: [route1, route2], service2: [route3, route4]}

  Args:
    emulators: [str], emulators to route the traffic of
    output_file: str, file to write the configuration to
  """
    routes = {
        name: emulator.prefixes
        for name, emulator in six.iteritems(emulators)
    }

    files.WriteFileContents(output_file, json.dumps(routes, indent=2))
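For example, with two hypothetical emulator objects that expose a prefixes attribute, the written file would contain JSON like the following (emulator names and prefixes are illustrative):

import json

class FakeEmulator(object):
    def __init__(self, prefixes):
        self.prefixes = prefixes

emulators = {
    'datastore': FakeEmulator(['/v1/projects']),
    'pubsub': FakeEmulator(['/v1/topics', '/v1/subscriptions']),
}
routes = {name: emu.prefixes for name, emu in emulators.items()}
# Pretty-prints {"datastore": [...], "pubsub": [...]} over several lines.
print(json.dumps(routes, indent=2))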
Example #7
    def testSetFileFlag(self):
        """Check that --*-file works."""
        files.WriteFileContents('daily-prices.yaml', _TEST_MAP_YAML)
        args = self.parser.parse_args(['--food-prices-file=daily-prices.yaml'])
        map_flags = map_util.GetMapFlagsFromArgs('food-prices', args)
        expected = {
            'set_flag_value': None,
            'update_flag_value': None,
            'clear_flag_value': None,
            'remove_flag_value': None,
            'file_flag_value': _TEST_MAP,
        }
        self.assertEqual(map_flags, expected)

        new_map = map_util.ApplyMapFlags(self.old_map, **map_flags)
        self.assertEqual(new_map, {'cod': 12, 'tomato': 2})
Example #8
    def BuildAndStoreFlexTemplateImage(image_gcr_path,
                                       flex_template_base_image, jar_paths,
                                       env, sdk_language):
        """Builds the flex template docker container image and stores it in GCR.

    Args:
      image_gcr_path: GCR location to store the flex template container image.
      flex_template_base_image: SDK version or base image to use.
      jar_paths: List of jar paths to pipelines and dependencies.
      env: Dictionary of env variables to set in the container image.
      sdk_language: SDK language of the flex template.

    Returns:
      True if the container is built and stored successfully.

    Raises:
      ValueError: If the parameters values are invalid.
    """
        Templates.__ValidateFlexTemplateEnv(env, sdk_language)
        with files.TemporaryDirectory() as temp_dir:
            log.status.Print(
                'Copying files to a temp directory {}'.format(temp_dir))
            jar_files = []
            for jar_path in jar_paths:
                absl_path = os.path.abspath(jar_path)
                shutil.copy2(absl_path, temp_dir)
                jar_files.append(os.path.split(absl_path)[1])

            log.status.Print(
                'Generating dockerfile to build the flex template container image...'
            )
            dockerfile_contents = Templates._BuildDockerfile(
                flex_template_base_image, jar_files, env, sdk_language)

            dockerfile_path = os.path.join(temp_dir, 'Dockerfile')
            files.WriteFileContents(dockerfile_path, dockerfile_contents)
            log.status.Print('Generated Dockerfile. Contents: {}'.format(
                dockerfile_contents))

            messages = cloudbuild_util.GetMessagesModule()
            build_config = submit_util.CreateBuildConfig(
                image_gcr_path, False, messages, None, 'cloudbuild.yaml', True,
                False, temp_dir, None, None, None, None, None, None)
            log.status.Print('Pushing flex template container image to GCR...')

            submit_util.Build(messages, False, build_config)
            return True
Example #9
    def Run(self, args):
        client = privateca_base.GetClientInstance()
        messages = privateca_base.GetMessagesModule()

        certificate_ref = args.CONCEPTS.certificate.Parse()
        certificate = client.projects_locations_certificateAuthorities_certificates.Get(
            messages.
            PrivatecaProjectsLocationsCertificateAuthoritiesCertificatesGetRequest(
                name=certificate_ref.RelativeName()))

        pem_chain = [certificate.pemCertificate]
        if args.include_chain:
            pem_chain += certificate.pemCertificateChain

        files.WriteFileContents(args.output_file, '\n'.join(pem_chain))
        log.status.write('Exported certificate [{}] to [{}].'.format(
            certificate_ref.RelativeName(), args.output_file))
Example #10
  def _StoreFlexTemplateFile(template_file_gcs_location, container_spec_json):
    """Stores flex template container spec file in GCS.

    Args:
      template_file_gcs_location: GCS location to store the template file.
      container_spec_json: Container spec in json format.

    Returns:
      Returns the stored flex template file GCS object on success.
      Propagates the error on failure.
    """
    with files.TemporaryDirectory() as temp_dir:
      local_path = os.path.join(temp_dir, 'template-file.json')
      files.WriteFileContents(local_path, container_spec_json)
      storage_client = storage_api.StorageClient()
      obj_ref = storage_util.ObjectReference.FromUrl(template_file_gcs_location)
      return storage_client.CopyFileToGCS(local_path, obj_ref)
Example #11
def OnlineEdit(text):
    """Edit will edit the provided text.

  Args:
    text: The initial text blob to provide for editing.

  Returns:
    The edited text blob.

  Raises:
    NoSaveException: If the user did not save the temporary file.
    EditorException: If the process running the editor has a
        problem.
  """
    fname = tempfile.NamedTemporaryFile(suffix='.txt').name
    files.WriteFileContents(fname, text)

    # Get the mod time, so we can check if anything was actually done.
    start_mtime = FileModifiedTime(fname)
    if (platforms.OperatingSystem.Current() is
            platforms.OperatingSystem.WINDOWS):
        try:
            SubprocessCheckCall([fname], shell=True)
        except subprocess.CalledProcessError as error:
            raise EditorException('Your editor exited with return code {0}; '
                                  'please try again.'.format(error.returncode))
    else:
        try:
            editor = os.getenv('EDITOR', 'vi')
            # We use shell=True and manual smashing of the args to permit users to set
            # EDITOR="emacs -nw", or similar things.
            # We use subprocess.check_call instead of subprocess.check_output because
            # subprocess.check_output requires a direct connection to a terminal.
            SubprocessCheckCall('{editor} {file}'.format(editor=editor,
                                                         file=fname),
                                shell=True)
        except subprocess.CalledProcessError as error:
            raise EditorException('Your editor exited with return code {0}; '
                                  'please try again. You may set the EDITOR '
                                  'environment variable to use a different '
                                  'editor.'.format(error.returncode))
    end_mtime = FileModifiedTime(fname)
    if start_mtime == end_mtime:
        raise NoSaveException('edit aborted by user')

    return files.ReadFileContents(fname)
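The save-detection trick (compare the file's modification time before and after the editor runs) also works standalone. A minimal sketch using only the standard library; edit_in_editor is an illustrative name:

import os
import subprocess
import tempfile

def edit_in_editor(text, editor='vi'):
    # delete=False keeps the file on disk after the handle is closed.
    with tempfile.NamedTemporaryFile(mode='w', suffix='.txt',
                                     delete=False) as f:
        f.write(text)
        path = f.name
    before = os.path.getmtime(path)
    subprocess.check_call('{} {}'.format(editor, path), shell=True)
    if os.path.getmtime(path) == before:
        raise RuntimeError('edit aborted by user')
    with open(path) as f:
        return f.read()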
Example #12
  def _GetCID():
    """Gets the client id from the config file, or generates a new one.

    Returns:
      str, The hex string of the client id.
    """
    uuid_path = config.Paths().analytics_cid_path
    cid = None
    if os.path.exists(uuid_path):
      cid = files.ReadFileContents(uuid_path)
      if cid:
        return cid

    cid = uuid.uuid4().hex  # A random UUID
    files.MakeDir(os.path.dirname(uuid_path))
    files.WriteFileContents(uuid_path, cid)
    return cid
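The same get-or-create shape, sketched standalone (assuming files.MakeDir behaves like os.makedirs with exist_ok=True; get_or_create_cid is an illustrative name):

import os
import uuid

def get_or_create_cid(path):
    """Return a stable anonymous client id, minting it on first use."""
    if os.path.exists(path):
        with open(path) as f:
            cid = f.read().strip()
        if cid:
            return cid
    cid = uuid.uuid4().hex
    # path is expected to contain a directory component, as in the original.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as f:
        f.write(cid)
    return cid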
Example #13
    def WriteJsonToFile(self, output_file):
        """Writes configuration to file.

    The format will be
    {"localEmulators": {emulator1: port1, emulator2: port2},
     "proxyPort": port,
     "shouldProxyToGcp": bool}

    Args:
      output_file: str, file to write to
    """
        data = {
            'localEmulators': self._local_emulators,
            'proxyPort': self._proxy_port,
            'shouldProxyToGcp': self._should_proxy_to_gcp,
        }
        files.WriteFileContents(output_file, json.dumps(data, indent=2))
Example #14
  def _WriteAgentLogs():
    """Writes logs from the agent install deployment to a temporary file."""
    logs, err = kube_client.Logs(
        namespace, 'deployment/{}'.format(agent_install_deployment_name))
    if err:
      log.warning(
          'Could not fetch Connect agent installation deployment logs: {}'
          .format(err))
      return

    _, tmp_file = tempfile.mkstemp(
        suffix='_{}.log'.format(times.Now().strftime('%Y%m%d-%H%M%S')),
        prefix='gke_connect_',
    )
    files.WriteFileContents(tmp_file, logs, private=True)
    log.status.Print(
        'Connect agent installation deployment logs saved to [{}]'.format(
            tmp_file))
Example #15
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    igm_ref = instance_groups_flags.CreateGroupReference(
        client, holder.resources, args)

    autoscaler = managed_instance_groups_utils.AutoscalerForMigByRef(
        client, holder.resources, igm_ref)
    if autoscaler:
      autoscaler_dict = encoding.MessageToDict(autoscaler)
      for f in _IGNORED_FIELDS:
        if f in autoscaler_dict:
          del autoscaler_dict[f]
    else:
      autoscaler_dict = None

    files.WriteFileContents(args.autoscaling_file, json.dumps(autoscaler_dict))
Example #16
    def WriteToFile(self, path, component_id=None):
        """Writes this snapshot back out to a JSON file.

    Args:
      path: str, The path of the file to write to.
      component_id: Limit snapshot to this component.
          If not specified all components are written out.

    Raises:
      ValueError: for a non-existent component_id.
    """
        sdk_def_dict = self.sdk_definition.ToDictionary()
        if component_id:
            component_dict = [
                c for c in sdk_def_dict['components']
                if c['id'] == component_id
            ]
            if not component_dict:
                raise ValueError(
                    'Component {} is not in this snapshot {}'.format(
                        component_id,
                        ','.join([c['id']
                                  for c in sdk_def_dict['components']])))
            if 'data' in component_dict[0]:
                # Remove non-essential/random parts from component data.
                for f in list(component_dict[0]['data'].keys()):
                    if f not in ('contents_checksum', 'type', 'source'):
                        del component_dict[0]['data'][f]
                # Source field is required for global snapshot, but is not for
                # component snapshot.
                component_dict[0]['data']['source'] = ''
            sdk_def_dict['components'] = component_dict
            # Remove unnecessary artifacts from snapshot.
            for key in list(sdk_def_dict.keys()):
                if key not in ('components', 'schema_version', 'revision',
                               'version'):
                    del sdk_def_dict[key]
        files.WriteFileContents(
            path,
            json.dumps(sdk_def_dict,
                       indent=2,
                       sort_keys=True,
                       separators=(',', ': ')))
Example #17
def ExportPrivateKey(private_key_output_file, private_key_bytes):
    """Export a private key to a filename, printing a warning to the user.

  Args:
    private_key_output_file: The path of the file to export to.
    private_key_bytes: The content in byte format to export.
  """

    try:
        # Make sure this file is only accessible to the running user before writing.
        files.PrivatizeFile(private_key_output_file)
        files.WriteFileContents(private_key_output_file, private_key_bytes)
        # Make file readable only by owner.
        os.chmod(private_key_output_file, 0o400)
        log.warning(KEY_OUTPUT_WARNING.format(private_key_output_file))
    except (files.Error, OSError, IOError):
        raise exceptions.FileOutputError(
            "Error writing to private key output file named '{}'".format(
                private_key_output_file))
Example #18
def GetFeatureFlagsConfig(account_id, project_id):
    """Gets the feature flags config.

  If the feature flags config file does not exist or is stale, download and save
  the feature flags config. Otherwise, read the feature flags config. Errors
  will be logged, but will not interrupt normal operation.

  Args:
    account_id: str, account ID.
    project_id: str, project ID.

  Returns:
    A FeatureFlagConfig, or None.
  """
    feature_flags_config_path = config.Paths().feature_flags_config_path

    with _FEATURE_FLAGS_LOCK:
        yaml_data = None
        if IsFeatureFlagsConfigStale(feature_flags_config_path):
            yaml_data = FetchFeatureFlagsConfig()
            try:
                file_utils.WriteFileContents(feature_flags_config_path,
                                             yaml_data or '')
            except file_utils.Error as e:
                logging.warning(
                    'Unable to write feature flags config [%s]: %s. Please '
                    'ensure that this path is writeable.',
                    feature_flags_config_path, e)
        else:
            try:
                yaml_data = file_utils.ReadFileContents(
                    feature_flags_config_path)
            except file_utils.Error as e:
                logging.warning(
                    'Unable to read feature flags config [%s]: %s. Please '
                    'ensure that this path is readable.',
                    feature_flags_config_path, e)

    if yaml_data:
        return FeatureFlagsConfig(yaml_data, account_id, project_id)
    return None
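IsFeatureFlagsConfigStale is not shown in this snippet. A plausible mtime-based staleness rule, purely as an illustrative sketch (the real check may differ):

import os
import time

def is_config_stale(path, max_age_seconds=3600):
    # Treat a missing or unreadable file as stale so it gets refreshed.
    try:
        return time.time() - os.path.getmtime(path) > max_age_seconds
    except OSError:
        return True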
Example #19
  def Run(self, staging_area, config_file, project_dir, explicit_appyaml=None):
    # Logic is: copy/symlink the project in the staged area, and create a
    # simple file app.yaml for runtime: java11 if it does not exist.
    # If it exists in the standard and documented default location
    # (in project_dir/src/main/appengine/app.yaml), copy it in the staged
    # area.
    appenginewebxml = os.path.join(project_dir, 'src', 'main', 'webapp',
                                   'WEB-INF', 'appengine-web.xml')
    if os.path.exists(appenginewebxml):
      raise self.error()
    if explicit_appyaml:
      shutil.copyfile(explicit_appyaml, os.path.join(staging_area, 'app.yaml'))
    else:
      appyaml = os.path.join(project_dir, 'src', 'main', 'appengine',
                             'app.yaml')
      if os.path.exists(appyaml):
        # Put the user app.yaml at the root of the staging directory to deploy
        # as required by the Cloud SDK.
        shutil.copy2(appyaml, staging_area)
      else:
        # Create a very simple 1 liner app.yaml for Java11 runtime.
        files.WriteFileContents(
            os.path.join(staging_area, 'app.yaml'), 'runtime: java11\n')

    for name in os.listdir(project_dir):
      # Do not deploy locally built artifacts, buildpack will clean this anyway.
      if name == self.ignore:
        continue
      srcname = os.path.join(project_dir, name)
      dstname = os.path.join(staging_area, name)
      if os.path.isdir(srcname):
        if hasattr(os, 'symlink'):
          os.symlink(srcname, dstname)
        else:
          files.CopyTree(srcname, dstname)
      else:
        if hasattr(os, 'symlink'):
          os.symlink(srcname, dstname)
        else:
          shutil.copy2(srcname, dstname)

    return staging_area
Example #20
def _GenerateSetupPyIfNeeded(setup_py_path, package_name):
  """Generates a temporary setup.py file if there is none at the given path.

  Args:
    setup_py_path: str, a path to the expected setup.py location.
    package_name: str, the name of the Python package for which to write a
      setup.py file (used in the generated file contents).

  Returns:
    bool, whether the setup.py file was generated.
  """
  log.debug('Looking for setup.py file at [%s]', setup_py_path)
  if os.path.isfile(setup_py_path):
    log.info('Using existing setup.py file at [%s]', setup_py_path)
    return False

  setup_contents = DEFAULT_SETUP_FILE.format(package_name=package_name)
  log.info('Generating temporary setup.py file:\n%s', setup_contents)
  files.WriteFileContents(setup_py_path, setup_contents)
  return True
Example #21
  def testCorruptedCache(self):
    # Set cache to something that can't be parsed as YAML
    config_dir = config.Paths().global_config_dir
    cache_file = os.path.join(config_dir, ".apigee-cached-project-mapping")
    files.WriteFileContents(cache_file, "data: {{ unparseable }}")

    canned_organization_response = {
        "organizations": [{
            "organization": "my-project",
            "projectIds": ["my-project"]
        },]
    }
    self.AddHTTPResponse(
        "https://apigee.googleapis.com/v1/organizations",
        body=json.dumps(canned_organization_response))
    self.AddHTTPResponse(
        "https://apigee.googleapis.com/v1/"
        "organizations/my-project/environments",
        body=json.dumps(["test"]))
    self.RunApigee("environments list --project=my-project")
Example #22
def SaveCredentialsAsADC(credentials, file_path):
  """Saves the credentials to the given file.

  This file can be read back via
    cred = client.GoogleCredentials.from_stream(file_path)

  Args:
    credentials: client.OAuth2Credentials, obtained from a web flow
        or service account.
    file_path: str, file path to store credentials to. The file will be created.

  Raises:
    CredentialFileSaveError: on file io errors.
  """
  creds_type = creds.CredentialType.FromCredentials(credentials)
  if creds_type == creds.CredentialType.P12_SERVICE_ACCOUNT:
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: p12 keys are not '
        'supported in this format')

  if creds_type == creds.CredentialType.USER_ACCOUNT:
    credentials = client.GoogleCredentials(
        credentials.access_token,
        credentials.client_id,
        credentials.client_secret,
        credentials.refresh_token,
        credentials.token_expiry,
        credentials.token_uri,
        credentials.user_agent,
        credentials.revoke_uri)
  try:
    contents = json.dumps(credentials.serialization_data, sort_keys=True,
                          indent=2, separators=(',', ': '))
    files.WriteFileContents(file_path, contents, private=True)
  except files.Error as e:
    log.debug(e, exc_info=True)
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: ' + six.text_type(e))
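For a user account, serialization_data produces the familiar application default credentials shape, roughly (values redacted, layout illustrative):

{
  "client_id": "...",
  "client_secret": "...",
  "refresh_token": "...",
  "type": "authorized_user"
}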
Example #23
  def GenerateAppYaml(self, notify):
    """Generate app.yaml.

    Args:
      notify: depending on whether we're in deploy, write messages to the
        user or to log.
    Returns:
      (bool) True if file was written

    Note: this is not a recommended use case;
    python-compat users likely have an existing app.yaml.  But users can
    still get here with the --runtime flag.
    """
    if not self.params.appinfo:
      app_yaml = os.path.join(self.root, 'app.yaml')
      if not os.path.exists(app_yaml):
        notify('Writing [app.yaml] to [%s].' % self.root)
        runtime = 'custom' if self.params.custom else self.runtime
        files.WriteFileContents(app_yaml,
                                PYTHON_APP_YAML.format(runtime=runtime))
        log.warning(APP_YAML_WARNING)
        return True
    return False
Example #24
def GenerateApi(base_dir, root_dir, api_name, api_version, api_config):
    """Invokes apitools generator for given api."""
    discovery_doc = api_config['discovery_doc']

    args = [gen_client.__file__]

    unelidable_request_methods = api_config.get('unelidable_request_methods')
    if unelidable_request_methods:
        args.append('--unelidable_request_methods={0}'.format(','.join(
            api_config['unelidable_request_methods'])))

    args.extend([
        '--init-file=empty',
        '--nogenerate_cli',
        '--infile={0}'.format(os.path.join(base_dir, root_dir, discovery_doc)),
        '--outdir={0}'.format(
            os.path.join(base_dir, root_dir, api_name, api_version)),
        '--overwrite',
        '--apitools_version=CloudSDK',
        '--root_package',
        '{0}.{1}.{2}'.format(root_dir.replace('/', '.'), api_name,
                             api_version),
        'client',
    ])
    logging.debug('Apitools gen %s', args)
    gen_client.main(args)

    package_dir = base_dir
    for subdir in [root_dir, api_name, api_version]:
        package_dir = os.path.join(package_dir, subdir)
        init_file = os.path.join(package_dir, '__init__.py')
        if not os.path.isfile(init_file):
            logging.warning(
                '%s does not have __init__.py file, generating ...',
                package_dir)
            files.WriteFileContents(init_file, _INIT_FILE_CONTENT)
Example #25
def create_credential_config(args, config_type):
  """Creates the byoid credential config based on CLI arguments."""
  try:
    generator = get_generator(args, config_type)
    output = {
        'type': 'external_account',
        'audience': '//iam.googleapis.com/' + args.audience,
        'subject_token_type': generator.get_token_type(args.subject_token_type),
        'token_url': 'https://sts.googleapis.com/v1/token',
        'credential_source': generator.get_source(args),
    }

    if config_type is ConfigType.WORKFORCE_POOLS:
      output['workforce_pool_user_project'] = args.workforce_pool_user_project

    if args.service_account:
      output['service_account_impersonation_url'] = ''.join((
          'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/',
          args.service_account, ':generateAccessToken'))

    files.WriteFileContents(args.output_file, json.dumps(output, indent=2))
    log.CreatedResource(args.output_file, RESOURCE_TYPE)
  except GeneratorError as cce:
    log.CreatedResource(args.output_file, RESOURCE_TYPE, failed=cce.message)
Example #26
def _WriteTempFile(data):
    """Write a new temporary file and register for cleanup at program exit.

  Args:
    data: data to write to the file

  Returns:
    string: the path to the new temporary file

  Raises:
    Error: if the write failed
  """
    try:
        _, f = tempfile.mkstemp()
    except Exception as e:  # pylint: disable=broad-except
        raise exceptions.Error('failed to create temp file: {}'.format(e))

    try:
        files.WriteFileContents(f, data, private=True)
        atexit.register(lambda: os.remove(f))
        return f
    except Exception as e:  # pylint: disable=broad-except
        os.remove(f)
        raise exceptions.Error('failed to write temp file {}: {}'.format(f, e))
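Two details worth noting: mkstemp creates the file with mode 0600 (which is what private=True asks for), but it also returns an open file descriptor that the snippet above discards. A standalone sketch that closes it explicitly:

import atexit
import os
import tempfile

def write_temp_file(data):
    fd, path = tempfile.mkstemp()  # created readable/writable by owner only
    os.close(fd)  # close the raw descriptor before reopening for text I/O
    try:
        with open(path, 'w') as f:
            f.write(data)
        atexit.register(os.remove, path)
        return path
    except Exception:
        os.remove(path)
        raise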
Example #27
def WriteToFileOrStdout(path,
                        content,
                        overwrite=True,
                        binary=False,
                        private=False,
                        create_path=False):
    """Writes content to the specified file or stdout if path is '-'.

  Args:
    path: str, The path of the file to write.
    content: str, The content to write to the file.
    overwrite: bool, Whether or not to overwrite the file if it exists.
    binary: bool, True to open the file in binary mode.
    private: bool, Whether to write the file in private mode.
    create_path: bool, True to create intermediate directories, if needed.

  Raises:
    Error: If the file cannot be written.
  """
    if path == '-':
        if binary:
            files.WriteStreamBytes(sys.stdout, content)
        else:
            out.write(content)
    elif binary:
        files.WriteBinaryFileContents(path,
                                      content,
                                      overwrite=overwrite,
                                      private=private,
                                      create_path=create_path)
    else:
        files.WriteFileContents(path,
                                content,
                                overwrite=overwrite,
                                private=private,
                                create_path=create_path)
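Illustrative calls (not from the original source); '-' routes the content to stdout, anything else goes through the files helpers:

WriteToFileOrStdout('-', 'hello\n')                       # writes to stdout
WriteToFileOrStdout('out.bin', b'\x00\x01', binary=True)  # raw bytes to a file
WriteToFileOrStdout('key.txt', 'secret-token',
                    private=True, create_path=True)       # owner-only file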
Example #28
def GetFileChooserForDir(directory,
                         default_ignore_file=DEFAULT_IGNORE_FILE,
                         write_on_disk=True,
                         gcloud_ignore_creation_predicate=_GitFilesExist,
                         include_gitignore=True,
                         ignore_file=None):
    """Gets the FileChooser object for the given directory.

  In order of preference:
  - If ignore_file is not None, use it to skip files. If the specified file
    does not exist, an error is raised.
  - Use .gcloudignore file in the top-level directory.
  - Evaluates creation predicate to determine whether to generate .gcloudignore.
    include_gitignore determines whether the generated .gcloudignore will
    include the user's .gitignore if one exists. If the directory is not
    writable, the file chooser corresponding to the ignore file that would have
    been generated is used.
  - If the creation predicate evaluates to false, returned FileChooser
    will choose all files.

  Args:
    directory: str, the path of the top-level directory to upload
    default_ignore_file: str, the ignore file to use if one is not found (and
      the directory has Git files).
    write_on_disk: bool, whether to save the generated gcloudignore to disk.
    gcloud_ignore_creation_predicate: one argument function, indicating if a
      .gcloudignore file should be created. The argument is the path of the
      directory that would contain the .gcloudignore file. By default
      .gcloudignore file will be created if and only if the directory contains
      .gitignore file or .git directory.
    include_gitignore: bool, whether the generated gcloudignore should include
      the user's .gitignore if present.
    ignore_file: str, custom ignore file name. Overrides the .gcloudignore
      file to customize which files are skipped.

  Raises:
    BadIncludedFileError: if a file being included does not exist or is not in
      the same directory.

  Returns:
    FileChooser: the FileChooser for the directory. If there is no .gcloudignore
    file and it can't be created the returned FileChooser will choose all files.
  """

    if ignore_file:
        gcloudignore_path = os.path.join(directory, ignore_file)
    else:
        if not properties.VALUES.gcloudignore.enabled.GetBool():
            log.info(
                'Not using a .gcloudignore file since gcloudignore is globally '
                'disabled.')
            return FileChooser([])
        gcloudignore_path = os.path.join(directory, IGNORE_FILE_NAME)
    try:
        chooser = FileChooser.FromFile(gcloudignore_path)
    except BadFileError:
        pass
    else:
        log.info('Using ignore file at [{}].'.format(gcloudignore_path))
        return chooser
    if not gcloud_ignore_creation_predicate(directory):
        log.info('Not using ignore file.')
        return FileChooser([])

    ignore_contents = _GetIgnoreFileContents(default_ignore_file, directory,
                                             include_gitignore)
    log.info('Using default gcloudignore file:\n{0}\n{1}\n{0}'.format(
        '--------------------------------------------------', ignore_contents))
    if write_on_disk:
        try:
            files.WriteFileContents(gcloudignore_path,
                                    ignore_contents,
                                    overwrite=False)
        except files.Error as err:
            log.info('Could not write .gcloudignore file: {}'.format(err))
        else:
            log.status.Print('Created .gcloudignore file. See `gcloud topic '
                             'gcloudignore` for details.')
    return FileChooser.FromString(ignore_contents,
                                  recurse=1,
                                  dirname=directory)
Example #29
def DeployConnectAgent(args,
                       service_account_key_data,
                       docker_credential_data,
                       upgrade=False):
  """Deploys the GKE Connect agent to the cluster.

  Args:
    args: arguments of the command.
    service_account_key_data: The contents of a Google IAM service account JSON
      file
    docker_credential_data: A credential that can be used to access Docker, to
      be stored in a secret and referenced from pod.spec.ImagePullSecrets.
    upgrade: whether to attempt to upgrade the agent, rather than replacing it.

  Raises:
    exceptions.Error: If the agent cannot be deployed properly
    calliope_exceptions.MinimumArgumentException: If the agent cannot be
    deployed properly
  """
  kube_client = KubernetesClient(args)

  image = args.docker_image
  if not image:
    # Get the SHA for the default image.
    try:
      digest = ImageDigestForContainerImage(DEFAULT_CONNECT_AGENT_IMAGE,
                                            DEFAULT_CONNECT_AGENT_TAG)
      image = '{}@{}'.format(DEFAULT_CONNECT_AGENT_IMAGE, digest)
    except Exception as exp:
      raise exceptions.Error(
          'could not determine image digest for {}:{}: {}'.format(
              DEFAULT_CONNECT_AGENT_IMAGE, DEFAULT_CONNECT_AGENT_TAG, exp))

  project_id = properties.VALUES.core.project.GetOrFail()
  namespace = _GKEConnectNamespace(kube_client, project_id)

  full_manifest, agent_install_deployment_name = GenerateInstallManifest(
      project_id, namespace, image, service_account_key_data,
      docker_credential_data, args.CLUSTER_NAME, args.proxy)

  # Generate a manifest file if necessary.
  if args.manifest_output_file:
    try:
      files.WriteFileContents(
          files.ExpandHomeDir(args.manifest_output_file),
          full_manifest,
          private=True)
    except files.Error as e:
      raise exceptions.Error('could not create manifest file: {}'.format(e))

    log.status.Print(MANIFEST_SAVED_MESSAGE.format(args.manifest_output_file))
    return

  log.status.Print('Deploying GKE Connect agent to cluster...')

  # During an upgrade, the namespace should not be deleted.
  if not upgrade:
    # Delete the ns if necessary
    if kube_client.NamespaceExists(namespace):
      console_io.PromptContinue(
          message='Namespace [{namespace}] already exists in the cluster. This '
          'may be from a previous installation of the agent. If you want to '
          'investigate, enter "n" and run\n\n'
          '  kubectl \\\n'
          '    --kubeconfig={kubeconfig} \\\n'
          '    --context={context} \\\n'
          '    get all -n {namespace}\n\n'
          'Continuing will delete namespace [{namespace}].'.format(
              namespace=namespace,
              kubeconfig=kube_client.kubeconfig,
              context=kube_client.context),
          cancel_on_no=True)
      try:
        succeeded, error = waiter.WaitFor(
            KubernetesPoller(),
            NamespaceDeleteOperation(namespace, kube_client),
            'Deleting namespace [{}] in the cluster'.format(namespace),
            pre_start_sleep_ms=NAMESPACE_DELETION_INITIAL_WAIT_MS,
            max_wait_ms=NAMESPACE_DELETION_TIMEOUT_MS,
            wait_ceiling_ms=NAMESPACE_DELETION_MAX_POLL_INTERVAL_MS,
            sleep_ms=NAMESPACE_DELETION_INITIAL_POLL_INTERVAL_MS)
      except waiter.TimeoutError as e:
        # waiter.TimeoutError assumes that the operation is a Google API
        # operation, and prints a debugging string to that effect.
        raise exceptions.Error(
            'Could not delete namespace [{}] from cluster.'.format(namespace))

      if not succeeded:
        raise exceptions.Error(
            'Could not delete namespace [{}] from cluster. Error: {}'.format(
                namespace, error))

  # Create or update the agent install deployment and related resources.
  err = kube_client.Apply(full_manifest)
  if err:
    raise exceptions.Error(
        'Failed to apply manifest to cluster: {}'.format(err))

  kubectl_log_cmd = (
      'kubectl --kubeconfig={} --context={} logs -n {} -l app={}'.format(
          kube_client.kubeconfig, kube_client.context, namespace,
          AGENT_INSTALL_APP_LABEL))

  def _WriteAgentLogs():
    """Writes logs from the agent install deployment to a temporary file."""
    logs, err = kube_client.Logs(
        namespace, 'deployment/{}'.format(agent_install_deployment_name))
    if err:
      log.warning(
          'Could not fetch Connect agent installation deployment logs: {}'
          .format(err))
      return

    _, tmp_file = tempfile.mkstemp(
        suffix='_{}.log'.format(times.Now().strftime('%Y%m%d-%H%M%S')),
        prefix='gke_connect_',
    )
    files.WriteFileContents(tmp_file, logs, private=True)
    log.status.Print(
        'Connect agent installation deployment logs saved to [{}]'.format(
            tmp_file))

  try:
    succeeded, error = waiter.WaitFor(
        KubernetesPoller(),
        DeploymentPodsAvailableOperation(namespace,
                                         RUNTIME_CONNECT_AGENT_DEPLOYMENT_NAME,
                                         image, kube_client),
        'Waiting for Connect agent to be installed',
        pre_start_sleep_ms=AGENT_INSTALL_INITIAL_WAIT_MS,
        max_wait_ms=AGENT_INSTALL_TIMEOUT_MS,
        wait_ceiling_ms=AGENT_INSTALL_MAX_POLL_INTERVAL_MS,
        sleep_ms=AGENT_INSTALL_INITIAL_POLL_INTERVAL_MS)
  except waiter.TimeoutError:
    # waiter.TimeoutError assumes that the operation is a Google API operation,
    # and prints a debugging string to that effect.
    _WriteAgentLogs()
    raise exceptions.Error(
        'Connect agent installation timed out. Leaving deployment in cluster '
        'for further debugging.\nTo view logs from the cluster:\n\n'
        '{}\n'.format(kubectl_log_cmd))

  _WriteAgentLogs()

  if not succeeded:
    raise exceptions.Error(
        'Connect agent installation did not succeed. To view logs from the '
        'cluster: {}\nKubectl error log: {}'.format(kubectl_log_cmd, error))

  log.status.Print('Connect agent installation succeeded.')
Example #30
    def Run(self, args):
        ca_pool_ref = args.CONCEPTS.ca_pool.Parse()
        pem_bag = self._GetRootCerts(ca_pool_ref)
        files.WriteFileContents(args.output_file, pem_bag)
        log.status.write('Exported the CA certificates to [{}].'.format(
            args.output_file))