Exemplo n.º 1
0
def CreateServiceAccountKey(service_account_name):
  """Create a user-managed service account key, reusing a cached one if present.

  The key file is cached on disk (under the gcloud global config directory,
  or at $LOCAL_CREDENTIAL_PATH if that environment variable is set), so
  repeated calls do not mint a new key each time.

  Args:
    service_account_name: Name of service account.

  Returns:
    The contents of the generated private key file as a string.
  """
  default_credential_path = os.path.join(
      config.Paths().global_config_dir,
      _Utf8ToBase64(service_account_name) + '.json')
  # Allow tests/local setups to redirect where the key is cached.
  credential_file_path = os.environ.get('LOCAL_CREDENTIAL_PATH',
                                        default_credential_path)
  if os.path.exists(credential_file_path):
    return files.ReadFileContents(credential_file_path)

  # BUG FIX: the original message concatenated 'storage,' and 'rotation'
  # with no separating space, rendering as "storage,rotation".
  warning_msg = ('Creating a user-managed service account key for '
                 '{service_account_name}. This service account key will be '
                 'the default credential pointed to by '
                 'GOOGLE_APPLICATION_CREDENTIALS in the local development '
                 'environment. The user is responsible for the storage, '
                 'rotation, and deletion of this key. A copy of this key will '
                 'be stored at {local_key_path}').format(
                     service_account_name=service_account_name,
                     local_key_path=credential_file_path)
  console_io.PromptContinue(
      message=warning_msg, prompt_string='Continue?', cancel_on_no=True)

  service = apis.GetClientInstance('iam', 'v1')
  message_module = service.MESSAGES_MODULE

  create_key_request = (
      message_module.IamProjectsServiceAccountsKeysCreateRequest(
          name=service_account_name,
          createServiceAccountKeyRequest=message_module
          .CreateServiceAccountKeyRequest(
              privateKeyType=message_module.CreateServiceAccountKeyRequest
              .PrivateKeyTypeValueValuesEnum.TYPE_GOOGLE_CREDENTIALS_FILE)))
  key = service.projects_serviceAccounts_keys.Create(create_key_request)

  # Cache the key so subsequent invocations reuse it.
  files.WriteFileContents(credential_file_path, key.privateKeyData)

  return six.u(key.privateKeyData)
Exemplo n.º 2
0
  def __init__(self, name, command_root_directory,
               allow_non_existing_modules=False,
               logs_dir=config.Paths().logs_dir, version_func=None,
               known_error_handler=None, yaml_command_translator=None):
    """Initialize Calliope.

    NOTE(review): the logs_dir default is evaluated once at module import
    time (config.Paths().logs_dir), not per call — confirm this is intended.

    Args:
      name: str, The name of the top level command, used for nice error
        reporting.
      command_root_directory: str, The path to the directory containing the main
        CLI module.
      allow_non_existing_modules: True to allow extra module directories to not
        exist, False to raise an exception if a module does not exist.
      logs_dir: str, The path to the root directory to store logs in, or None
        for no log files.
      version_func: func, A function to call for a top-level -v and
        --version flag. If None, no flags will be available.
      known_error_handler: f(x)->None, A function to call when an known error is
        handled. It takes a single argument that is the exception.
      yaml_command_translator: YamlCommandTranslator, An instance of a
        translator that will be used to load commands written as a yaml spec.

    Raises:
      command_loading.LayoutException: If no command root directory is given.
    """
    self.__name = name
    self.__command_root_directory = command_root_directory
    # A command root directory is mandatory; fail fast when it is missing.
    if not self.__command_root_directory:
      raise command_loading.LayoutException(
          'You must specify a command root directory.')

    self.__allow_non_existing_modules = allow_non_existing_modules

    self.__logs_dir = logs_dir
    self.__version_func = version_func
    # NOTE(review): 'errror' is misspelled, but the attribute name is kept
    # as-is since other methods of this class (outside this view) may
    # reference the same mangled name.
    self.__known_errror_handler = known_error_handler
    self.__yaml_command_translator = yaml_command_translator

    # Hooks executed before/after each command invocation.
    self.__pre_run_hooks = []
    self.__post_run_hooks = []

    self.__modules = []
    self.__missing_components = {}
    self.__release_tracks = {}
Exemplo n.º 3
0
    def __init__(self,
                 name,
                 command_root_directory,
                 allow_non_existing_modules=False,
                 load_context=None,
                 logs_dir=config.Paths().logs_dir,
                 version_func=None):
        """Initialize Calliope.

        NOTE(review): the logs_dir default is evaluated once at module import
        time (config.Paths().logs_dir), not per call — confirm this is
        intended.

        Args:
          name: str, The name of the top level command, used for nice error
            reporting.
          command_root_directory: str, The path to the directory containing
            the main CLI module.
          allow_non_existing_modules: True to allow extra module directories
            to not exist, False to raise an exception if a module does not
            exist.
          load_context: A function that returns a context dict, or None for a
            default which always returns {}.
          logs_dir: str, The path to the root directory to store logs in, or
            None for no log files.
          version_func: func, A function to call for a top-level -v and
            --version flag. If None, no flags will be available.

        Raises:
          backend.LayoutException: If no command root directory is given.
        """
        self.__name = name
        self.__command_root_directory = command_root_directory
        # A command root directory is mandatory; fail fast when it is missing.
        if not self.__command_root_directory:
            raise backend.LayoutException(
                'You must specify a command root directory.')

        self.__allow_non_existing_modules = allow_non_existing_modules

        self.__config_hooks = backend.ConfigHooks(load_context=load_context)
        self.__logs_dir = logs_dir
        self.__version_func = version_func

        # Hooks executed before/after each command invocation.
        self.__pre_run_hooks = []
        self.__post_run_hooks = []

        self.__modules = []
        self.__missing_components = {}
        self.__release_tracks = {}
Exemplo n.º 4
0
    def __init__(self, anonymizer=None):
        """Collect Cloud SDK installation diagnostics.

        Args:
          anonymizer: optional object with a ProcessPath(path) method used to
            scrub paths before reporting; defaults to NoopAnonymizer.
        """
        anonymizer = anonymizer or NoopAnonymizer()
        self.sdk_root = anonymizer.ProcessPath(config.Paths().sdk_root)
        self.release_channel = config.INSTALLATION_CONFIG.release_channel
        self.repo_url = config.INSTALLATION_CONFIG.snapshot_url
        # additional_repositories is a comma-separated property string.
        repos = properties.VALUES.component_manager.additional_repositories.Get(
            validate=False)
        self.additional_repos = repos.split(',') if repos else []
        # Keep it as array for structured output.
        path = encoding.GetEncodedValue(os.environ, 'PATH',
                                        '').split(os.pathsep)
        # Python import path (sys.path), anonymized per element.
        self.python_path = [
            anonymizer.ProcessPath(encoding.Decode(path_elem))
            for path_elem in sys.path
        ]

        if self.sdk_root:
            manager = update_manager.UpdateManager()
            self.components = manager.GetCurrentVersionsInformation()
            self.old_tool_paths = [
                anonymizer.ProcessPath(p)
                for p in manager.FindAllOldToolsOnPath()
            ]
            self.duplicate_tool_paths = [
                anonymizer.ProcessPath(p)
                for p in manager.FindAllDuplicateToolsOnPath()
            ]
            # Canonicalize PATH entries and compare against the SDK bin dir
            # to decide whether the SDK is on the user's PATH.
            paths = [os.path.realpath(p) for p in path]
            this_path = os.path.realpath(
                os.path.join(self.sdk_root,
                             update_manager.UpdateManager.BIN_DIR_NAME))
            # TODO(b/36055867): Validate symlinks in /usr/local/bin when we start
            # creating them.
            self.on_path = this_path in paths
        else:
            # No SDK root detected: report empty/neutral values.
            self.components = {}
            self.old_tool_paths = []
            self.duplicate_tool_paths = []
            self.on_path = False

        self.path = [anonymizer.ProcessPath(p) for p in path]
        # SearchForExecutableOnPath appears to return a sequence of matches
        # (first one is used below) — confirm against its definition.
        self.kubectl = file_utils.SearchForExecutableOnPath('kubectl')
        if self.kubectl:
            self.kubectl = anonymizer.ProcessPath(self.kubectl[0])
Exemplo n.º 5
0
 def __init__(self):
     """Load context-aware (mTLS) client certificate configuration.

     When the context_aware.use_client_certificate property is enabled,
     discovers the endpoint verification config file, runs the configured
     cert provider command, and provisions a combined cert+key PEM file
     under the gcloud global config directory. Cleanup() is registered
     via atexit to remove the provisioned file on exit.

     Raises:
       ConfigException: client certificates are enabled but no certificate
         could be provisioned (no agent config or provisioning failed).
     """
     self.use_client_certificate = (
         properties.VALUES.context_aware.use_client_certificate.GetBool())
     self._cert_and_key_path = None
     self.client_cert_path = None
     self.client_cert_password = None
     self.cert_provider_command = ''
     atexit.register(self.Cleanup)
     if self.use_client_certificate:
         # Search for configuration produced by endpoint verification
         cfg_file = _AutoDiscoveryFilePath()
         # Autodiscover context aware settings from configuration file created by
         # end point verification agent
         try:
             contents = files.ReadFileContents(cfg_file)
             log.debug('context aware settings detected at %s', cfg_file)
             json_out = json.loads(contents)
             if 'cert_provider_command' in json_out:
                 # Execute the cert provider to provision client certificates for
                 # context aware access
                 self.cert_provider_command = json_out[
                     'cert_provider_command']
                 # Remember the certificate path when auto provisioning
                 # to cleanup after use
                 self._cert_and_key_path = os.path.join(
                     config.Paths().global_config_dir, 'caa_cert.pem')
                 # Certs provisioned using endpoint verification are stored as a
                 # single file holding both the public certificate
                 # and the private key
                 self._ProvisionClientCert(self.cert_provider_command,
                                           self._cert_and_key_path)
                 self.client_cert_path = self._cert_and_key_path
             else:
                 raise CertProvisionException('no cert provider detected')
         except files.Error as e:
             # Missing/unreadable config file is a normal (non-agent) setup.
             log.debug('context aware settings discovery file %s - %s',
                       cfg_file, e)
         except CertProvisionException as e:
             log.error('failed to provision client certificate - %s', e)
         if self.client_cert_path is None:
             raise ConfigException(
                 'Use of client certificate requires endpoint verification agent. '
                 'Run `gcloud topic client-certificate` for installation guide.'
             )
Exemplo n.º 6
0
def _GetLegacyGen(account, creds, scopes=None):
    """Build a _LegacyGenerator for the given account and credentials.

    Args:
      account: str, the account whose legacy credential file paths are used.
      creds: the credentials object to render into legacy formats.
      scopes: optional iterable of scopes; defaults to config.CLOUDSDK_SCOPES.

    Returns:
      A configured _LegacyGenerator instance.
    """
    paths = config.Paths()
    return _LegacyGenerator(
        multistore_path=paths.LegacyCredentialsMultistorePath(account),
        json_path=paths.LegacyCredentialsJSONPath(account),
        gae_java_path=paths.LegacyCredentialsGAEJavaPath(account),
        gsutil_path=paths.LegacyCredentialsGSUtilPath(account),
        key_path=paths.LegacyCredentialsKeyPath(account),
        json_key_path=paths.LegacyCredentialsJSONKeyPath(account),
        credentials=creds,
        scopes=config.CLOUDSDK_SCOPES if scopes is None else scopes)
Exemplo n.º 7
0
  def __init__(self, name=None, create=True):
    """ResourceCache constructor.

    Args:
      name: The persistent cache object name. When None, a per-account
        default of <GLOBAL_CONFIG_DIR>/cache/<ACCOUNT>/resource.cache is
        used.
      create: Create the cache if it doesn't exist if True.
    """
    if not name:
      # Build the default cache path, inserting the account segment when a
      # current account is configured.
      parts = [config.Paths().cache_dir]
      account = properties.VALUES.core.account.Get(required=False)
      if account:
        parts.append(account)
      files.MakeDir(os.path.join(*parts))
      parts.append('resource.cache')
      name = os.path.join(*parts)
    super(ResourceCache, self).__init__(
        name=name, create=create, version='googlecloudsdk.resource-1.0')
Exemplo n.º 8
0
    def __GetCID():
        """Return the analytics client id, creating and persisting one if needed.

        Returns:
          str, The hex string of the client id.
        """
        cid_path = config.Paths().analytics_cid_path
        # Reuse a previously persisted client id when the file is non-empty.
        if os.path.exists(cid_path):
            with open(cid_path) as stream:
                existing = stream.read()
            if existing:
                return existing

        # No usable id on disk: generate a random UUID and persist it.
        fresh = uuid.uuid4().hex
        with open(cid_path, 'w') as stream:
            stream.write(fresh)
        return fresh
def _FindStorageKeyForAccount(account):
  """Scans credential file for keys matching given account.

  If such key(s) is found it checks that current set of scopes is a subset of
  scopes associated with the key.

  Args:
    account: str, The account tied to the storage key being fetched.

  Returns:
    dict, key to be used in the credentials store.
  """
  storage_path = config.Paths().credentials_path
  current_scopes = set(config.CLOUDSDK_SCOPES)
  # A key is "equivalent" when it is a google-cloud-sdk key for this account
  # and either carries no scope list or its scopes cover the current scopes.
  equivalent_keys = [key for key in
                     multistore_file.get_all_credential_keys(
                         filename=storage_path)
                     if (key.get('type') == 'google-cloud-sdk' and
                         key.get('account') == account and (
                             'scope' not in key or
                             set(key.get('scope').split()) >= current_scopes))]

  preferred_key = _GetStorageKeyForAccount(account)
  if preferred_key in equivalent_keys:
    # Already stored under the preferred key; leave it, clean up the rest.
    equivalent_keys.remove(preferred_key)
  elif equivalent_keys:  # Migrate credentials over to new key format.
    storage = multistore_file.get_credential_storage_custom_key(
        filename=storage_path,
        key_dict=equivalent_keys[0])
    creds = storage.get()
    storage = multistore_file.get_credential_storage_custom_key(
        filename=storage_path,
        key_dict=preferred_key)
    storage.put(creds)

  # Remove all other entries.
  for key in equivalent_keys:
    storage = multistore_file.get_credential_storage_custom_key(
        filename=storage_path,
        key_dict=key)
    storage.delete()

  return preferred_key
Exemplo n.º 10
0
def CreateCLI():
    """Generates the gcloud CLI."""
    sdk_root = config.Paths().sdk_root
    help_dir = os.path.join(sdk_root, 'help') if sdk_root else None

    pkg_root = cli.GoogleCloudSDKPackageRoot()
    loader = cli.CLILoader(
        name='gcloud',
        command_root_directory=os.path.join(pkg_root, 'gcloud', 'sdktools',
                                            'root'),
        allow_non_existing_modules=True,
        version_func=VersionFunc,
        help_dir=help_dir)

    # (module name, path components under the package root)
    module_specs = [
        ('auth', ('gcloud', 'sdktools', 'auth')),
        ('bigquery', ('bigquery', 'commands')),
        ('components', ('gcloud', 'sdktools', 'components')),
        ('compute', ('compute', 'subcommands')),
        ('config', ('gcloud', 'sdktools', 'config')),
        ('dns', ('dns', 'dnstools')),
        ('endpoints', ('endpoints', 'commands')),
        ('preview', ('preview', 'commands')),
        # Put app and datastore under preview for now.
        ('preview.app', ('appengine', 'app_commands')),
        ('preview.datastore', ('appengine', 'datastore_commands')),
        ('projects', ('projects', 'commands')),
        ('sql', ('sql', 'tools')),
    ]
    for module_name, rel_parts in module_specs:
        loader.AddModule(module_name, os.path.join(pkg_root, *rel_parts))

    # Check for updates on shutdown but not for any of the updater commands.
    loader.RegisterPostRunHook(UpdateCheck,
                               exclude_commands=r'gcloud\.components\..*')
    return loader.Generate()
Exemplo n.º 11
0
    def Load():
        """Loads the set of properties for the CloudSDK CLIs from files.

        This function will load the properties file, first from the
        installation config, then the global config directory
        CLOUDSDK_GLOBAL_CONFIG_DIR, and then from the workspace config
        directory CLOUDSDK_WORKSPACE_CONFIG_DIR.

        Returns:
          properties.Properties, The CloudSDK properties.
        """
        config_paths = config.Paths()
        candidates = (
            config_paths.installation_properties_path,
            config_paths.user_properties_path,
            config_paths.workspace_properties_path,
        )
        # Drop any path that was None before handing the list over.
        return _PropertiesFile([p for p in candidates if p])
def GetFeatureFlagsConfig(account_id, project_id):
    """Gets the feature flags config.

    If the feature flags config file does not exist or is stale, download and
    save the feature flags config. Otherwise, read the feature flags config.
    Errors will be logged, but will not interrupt normal operation.

    Args:
      account_id: str, account ID.
      project_id: str, project ID.

    Returns:
      A FeatureFlagsConfig, or None.
    """
    config_path = config.Paths().feature_flags_config_path

    contents = None
    with _FEATURE_FLAGS_LOCK:
        if IsFeatureFlagsConfigStale(config_path):
            # Stale or missing: fetch a fresh copy and try to persist it.
            contents = FetchFeatureFlagsConfig()
            try:
                file_utils.WriteFileContents(config_path, contents or '')
            except file_utils.Error as e:
                logging.warning(
                    'Unable to write feature flags config [%s]: %s. Please '
                    'ensure that this path is writeable.', config_path, e)
        else:
            # Fresh enough: use the cached copy on disk.
            try:
                contents = file_utils.ReadFileContents(config_path)
            except file_utils.Error as e:
                logging.warning(
                    'Unable to read feature flags config [%s]: %s. Please '
                    'ensure that this path is readable.', config_path, e)

    if contents:
        return FeatureFlagsConfig(contents, account_id, project_id)
    return None
Exemplo n.º 13
0
def main():
    """Launches gsutil."""

    project, account = bootstrapping.GetActiveProjectAndAccount()
    pass_credentials = properties.VALUES.core.pass_credentials_to_gsutil.GetBool(
    )

    # For non-GCE accounts, point gsutil at the legacy refresh-token config
    # by appending it to BOTO_PATH.
    if pass_credentials and account not in c_gce.Metadata().Accounts():
        gsutil_path = config.Paths().LegacyCredentialsGSUtilPath(account)

        boto_config = os.environ.get('BOTO_CONFIG', '')
        boto_path = os.environ.get('BOTO_PATH', '')

        # We construct a BOTO_PATH that tacks the refresh token config
        # on the end.
        if boto_config:
            boto_path = os.pathsep.join([boto_config, gsutil_path])
        elif boto_path:
            boto_path = os.pathsep.join([boto_path, gsutil_path])
        else:
            # Neither var set: reproduce boto's default search order, then
            # append the gcloud-managed config.
            path_parts = [
                '/etc/boto.cfg',
                os.path.expanduser(os.path.join('~', '.boto')), gsutil_path
            ]
            boto_path = os.pathsep.join(path_parts)

        # BOTO_CONFIG takes precedence over BOTO_PATH in boto, so clear it.
        if 'BOTO_CONFIG' in os.environ:
            del os.environ['BOTO_CONFIG']
        os.environ['BOTO_PATH'] = boto_path

    # Tell gsutil whether gcloud analytics collection is enabled.
    # NOTE(review): if GetCIDIfMetricsEnabled() can return None this
    # assignment would raise TypeError — confirm it always returns a str.
    os.environ['GA_CID'] = metrics.GetCIDIfMetricsEnabled()

    args = []

    if project:
        args.extend(['-o', 'GSUtil:default_project_id=%s' % project])
    if pass_credentials and account in c_gce.Metadata().Accounts():
        # Tell gsutil to look for GCE service accounts.
        args.extend(['-o', 'GoogleCompute:service_account=default'])

    bootstrapping.ExecutePythonTool('platform/gsutil', 'gsutil', *args)
Exemplo n.º 14
0
    def testSentinel(self):
        """Sentinel file is touched only by changes affecting the active config."""
        paths = config.Paths()
        self.ClearAllConfigurations()
        self.AssertExistingConfigs('default', 'default')

        # Updating a non-active configuration must not touch the sentinel.
        inactive = named_configs.ConfigurationStore.CreateConfig('foo')
        inactive.PersistProperty('core', 'account', 'foo')
        self.assertFalse(os.path.isfile(paths.config_sentinel_file))

        # Updating the active configuration must touch it.
        active = named_configs.ConfigurationStore.ActiveConfig()
        active.PersistProperty('core', 'account', 'foo')
        self.assertTrue(os.path.isfile(paths.config_sentinel_file))

        # Switching the active configuration must also touch it.
        os.remove(paths.config_sentinel_file)
        self.assertFalse(os.path.isfile(paths.config_sentinel_file))
        named_configs.ConfigurationStore.ActivateConfig('foo')
        self.assertTrue(os.path.isfile(paths.config_sentinel_file))
Exemplo n.º 15
0
def main():
  """Launches appcfg.py."""

  unused_project, account = bootstrapping.GetActiveProjectAndAccount()
  json_creds = config.Paths().LegacyCredentialsJSONPath(account)

  args = ['--skip_sdk_update_check']
  # Prefer a Cloud Shell (devshell) access token when available; otherwise
  # fall back to the standard oauth2 flow using the legacy JSON credentials.
  try:
    creds = devshell.DevshellCredentials()
    args.extend([
        '--oauth2_access_token=' + creds.access_token
    ])
  except devshell.NoDevshellServer:
    # NOTE: this id/secret pair is the well-known public gcloud OAuth
    # client, not a user secret.
    args.extend([
        '--oauth2',
        '--oauth2_client_id=32555940559.apps.googleusercontent.com',
        '--oauth2_client_secret=ZmssLNjJy2998hD4CTg2ejr2',
        '--oauth2_credential_file={0}'.format(json_creds),
    ])
  bootstrapping.ExecutePythonTool('platform/google_appengine', 'appcfg.py', *args)
Exemplo n.º 16
0
  def testCorruptedCache(self):
    """An unparseable project-mapping cache must be ignored, not fatal."""
    # Set cache to something that can't be parsed as YAML
    config_dir = config.Paths().global_config_dir
    cache_file = os.path.join(config_dir, ".apigee-cached-project-mapping")
    files.WriteFileContents(cache_file, "data: {{ unparseable }}")

    # With the cache unusable, the command must fall back to the API; these
    # canned responses satisfy the organization and environment lookups.
    canned_organization_response = {
        "organizations": [{
            "organization": "my-project",
            "projectIds": ["my-project"]
        },]
    }
    self.AddHTTPResponse(
        "https://apigee.googleapis.com/v1/organizations",
        body=json.dumps(canned_organization_response))
    self.AddHTTPResponse(
        "https://apigee.googleapis.com/v1/"
        "organizations/my-project/environments",
        body=json.dumps(["test"]))
    # Should complete without raising despite the corrupted cache file.
    self.RunApigee("environments list --project=my-project")
Exemplo n.º 17
0
def GetCredentialStore(store_file=None, access_token_file=None):
    """Constructs credential store.

    Args:
      store_file: str, optional path to use for storage. If not specified
        config.Paths().credentials_path will be used.
      access_token_file: str, optional path to use for access token storage.
        Note that some implementations use store_file to also store access
        tokens, in which case this argument is ignored.

    Returns:
      CredentialStore object.
    """
    use_sqlite = properties.VALUES.auth.use_sqlite_store.GetBool()
    if use_sqlite:
        return _GetSqliteStore(store_file, access_token_file)

    oauth2_path = store_file or config.Paths().credentials_path
    return Oauth2ClientCredentialStore(oauth2_path)
Exemplo n.º 18
0
 def testAccessTokenCacheReadonlyStore(self):
   """Storing into a read-only token cache logs a warning instead of raising."""
   access_token_cache = creds.AccessTokenCache(
       config.Paths().access_token_db_path)
   credentials = creds.FromJson(self.SERVICE_ACCOUNT_CREDENTIALS_JSON)
   credentials.token_response = json.loads("""{"id_token": "woweee"}""")
   self.assertIsNone(credentials.access_token)
   # Simulate sqlite failing because the database file is not writeable.
   self.StartObjectPatch(
       access_token_cache,
       '_Execute',
       side_effect=sqlite3.OperationalError(
           'attempt to write to read-only database'))
   access_token_cache.Store(
       credentials.service_account_email,
       access_token='token1',
       token_expiry=datetime.datetime.utcnow() +
       datetime.timedelta(seconds=3600),
       rapt_token=None,
       id_token=None)
   # The failure must be swallowed and surfaced only as a log message.
   self.AssertLogContains('Could not store access token in cache: '
                          'attempt to write to read-only database')
Exemplo n.º 19
0
    def Load():
        """Loads the set of active properties from file.

        This includes both the installation configuration as well as the
        currently active configuration file. The parsed result is cached in
        ActivePropertiesFile._PROPERTIES; the lock makes the lazy
        initialization safe across threads.

        Returns:
          properties_file.PropertiesFile, The CloudSDK properties.
        """
        # Idiom fix: use the lock as a context manager rather than a manual
        # acquire / try / finally / release sequence (same semantics).
        with ActivePropertiesFile._LOCK:
            if not ActivePropertiesFile._PROPERTIES:
                ActivePropertiesFile._PROPERTIES = properties_file.PropertiesFile(
                    [
                        config.Paths().installation_properties_path,
                        _ActiveConfig(force_create=False).file_path
                    ])
        return ActivePropertiesFile._PROPERTIES
Exemplo n.º 20
0
  def testAttachAccessTokenCacheStoreGoogleAuth(self):
    """Attaching the cache store populates the google-auth credentials token."""
    # Create credentials.
    credentials = google_auth_service_account.Credentials(
        None, 'email', 'token_uri')
    self.assertIsNone(credentials.token)

    # Create access token cache.
    access_token_cache = creds.AccessTokenCache(
        config.Paths().access_token_db_path)
    # Seed the cache with a token valid for one hour.
    access_token_cache.Store(
        credentials.service_account_email,
        access_token='token1',
        token_expiry=datetime.datetime.utcnow() +
        datetime.timedelta(seconds=3600),
        rapt_token=None,
        id_token=None)

    # Attach access token cache store to credentials.
    new_creds = creds.MaybeAttachAccessTokenCacheStoreGoogleAuth(credentials)
    self.assertEqual(new_creds.token, 'token1')
Exemplo n.º 21
0
  def __init__(self, account, credentials, scopes=None):
    """Initialize a legacy credential file generator for the given account.

    Args:
      account: str, account name used to derive legacy credential file paths.
      credentials: the credentials object to export; must be a user account,
        service account, or p12 service account credential.
      scopes: optional iterable of scopes; defaults to config.CLOUDSDK_SCOPES.

    Raises:
      creds.CredentialFileSaveError: if the credential type is unsupported.
    """
    self.credentials = credentials
    # NOTE(review): self._cred_type is expected to be defined elsewhere on
    # this class (not visible in this view) — confirm it classifies
    # self.credentials.
    if self._cred_type not in (creds.USER_ACCOUNT_CREDS_NAME,
                               creds.SERVICE_ACCOUNT_CREDS_NAME,
                               creds.P12_SERVICE_ACCOUNT_CREDS_NAME):
      raise creds.CredentialFileSaveError(
          'Unsupported credentials type {0}'.format(type(self.credentials)))
    if scopes is None:
      self.scopes = config.CLOUDSDK_SCOPES
    else:
      self.scopes = scopes

    paths = config.Paths()
    # Bq file is not generated here. bq CLI generates it using the adc at
    # self._adc_path and uses it as the cache.
    # Register so it is cleaned up.
    self._bq_path = paths.LegacyCredentialsBqPath(account)
    self._gsutil_path = paths.LegacyCredentialsGSUtilPath(account)
    self._p12_key_path = paths.LegacyCredentialsP12KeyPath(account)
    self._adc_path = paths.LegacyCredentialsAdcPath(account)
Exemplo n.º 22
0
    def Run(self, args):
        """Report Python version, installed modules, and enabled state of the
        gcloud virtualenv environment."""
        ve_dir = config.Paths().virtualenv_dir
        if not util.VirtualEnvExists(ve_dir):
            log.error('Virtual env does not exist at {}.'.format(ve_dir))
            raise exceptions.ExitCodeNoError(exit_code=1)

        def _capture_version(output):
            self._version_output = output

        def _capture_modules(output):
            self._modules_stdout = output

        # Determine the Python version used by the virtualenv.
        python_version = 'NOT AVAILABLE'
        exit_code = execution_utils.Exec(
            ['{}/bin/python3'.format(ve_dir), '--version'],
            no_exit=True,
            out_func=_capture_version)
        if exit_code == 0:
            pieces = self._version_output.split(' ')
            if len(pieces) == 2:
                python_version = pieces[1]

        # Collect the installed modules via `pip3 freeze`.
        modules = []
        execution_utils.Exec(['{}/bin/pip3'.format(ve_dir), 'freeze'],
                             no_exit=True,
                             out_func=_capture_modules)
        for line in self._modules_stdout.split('\n'):
            if '==' in line:
                mod_name, mod_version = line.split('==')
                modules.append(Module(mod_name, mod_version))

        # The enable|disable state of the virtual env environment.
        ve_enabled = False
        if util.EnableFileExists(ve_dir):
            ve_enabled = True

        return VirtualEnvInfo(python_version, modules, ve_enabled)
Exemplo n.º 23
0
    def AllConfigs(include_none_config=False):
        """Returns all the configurations that exist.

        This determines the currently active configuration so as a side
        effect it will create the default configuration if no configurations
        exist.

        Args:
          include_none_config: bool, True to include the NONE configuration
            in the list. This is a reserved configuration that indicates to
            not use any configuration. It is not explicitly created but is
            always available.

        Returns:
          {str, Configuration}, A map of configuration name to the
          configuration object.
        """
        config_dir = config.Paths().named_config_directory
        active_name = ConfigurationStore.ActiveConfig().name

        result = {}
        if include_none_config:
            result[_NO_ACTIVE_CONFIG_NAME] = Configuration(
                _NO_ACTIVE_CONFIG_NAME,
                _NO_ACTIVE_CONFIG_NAME == active_name)

        try:
            for entry in os.listdir(config_dir):
                match = re.match(_CONFIG_FILE_REGEX, entry)
                if not match:
                    continue
                cfg_name = match.group(1)
                result[cfg_name] = Configuration(cfg_name,
                                                 cfg_name == active_name)
            return result
        except (OSError, IOError) as exc:
            # A missing config directory just means no named configs exist
            # yet; any other OS error is surfaced to the caller.
            if exc.errno != errno.ENOENT:
                raise NamedConfigFileAccessError(
                    'List of configurations could not be read from: [{0}]'
                    .format(config_dir), exc)
        return {}
Exemplo n.º 24
0
def RunKubectlCommand(args, out_func=None, err_func=None):
    """Shells out a command to kubectl.

    This command should be called within the context of a
    TemporaryKubeconfig context manager in order for kubectl to be configured
    to access the correct cluster.

    Args:
      args: list of strings, command line arguments to pass to the kubectl
        command. Should omit the kubectl command itself. For example, to
        execute 'kubectl get pods', provide ['get', 'pods'].
      out_func: str->None, a function to call with the stdout of the kubectl
        command.
      err_func: str->None, a function to call with the stderr of the kubectl
        command.

    Raises:
      Error: if kubectl could not be called.
      KubectlError: if the invocation of kubectl was unsuccessful.
    """
    # Prefer the Cloud SDK-bundled kubectl (this lookup fails when the
    # component manager is disabled); fall back to the full PATH.
    kubectl_path = files.FindExecutableOnPath(_KUBECTL_COMPONENT_NAME,
                                              config.Paths().sdk_bin_path)
    if kubectl_path is None:
        kubectl_path = files.FindExecutableOnPath(_KUBECTL_COMPONENT_NAME)
    if kubectl_path is None:
        raise Error(MISSING_KUBECTL_MSG)

    command = execution_utils.ArgsForExecutableTool(kubectl_path, *args)
    try:
        exit_code = execution_utils.Exec(command,
                                         no_exit=True,
                                         out_func=out_func,
                                         err_func=err_func,
                                         universal_newlines=True)
    except (execution_utils.PermissionError,
            execution_utils.InvalidCommandError) as e:
        raise KubectlError(six.text_type(e))
    if exit_code:
        raise KubectlError('kubectl returned non-zero status code.')
Exemplo n.º 25
0
def TryEnsureWriteableNamedConfig():
    """Create and activate a named config for new/legacy users.

    No-op if the user already has named configurations (or has explicitly
    set --configuration NONE).  Otherwise creates a fresh configuration named
    AUTO_UPGRADE_NEW_CONFIG_NAME, best-effort imports the legacy flat
    properties file into it, and activates it.

    Returns: None

    Raises:
        IOError, if there's a problem creating a new configuration.
    """

    # Don't try to update if the user has named configs.  LHS side of the `or`
    # helps if an otherwise new user has --configuration NONE.  The RHS if a
    # user has named configs but has deleted their activator file.  (Let's not
    # mess with their state any more in the latter case.)
    if GetNameOfActiveNamedConfig() or ListNamedConfigs():
        return

    logging.warn('Creating and activating new configuration [%s].',
                 AUTO_UPGRADE_NEW_CONFIG_NAME)

    CreateNamedConfig(AUTO_UPGRADE_NEW_CONFIG_NAME)

    legacy_properties = None
    try:
        # Capture the legacy properties file's contents, then rewrite it in
        # place: truncate(0) + seek(0) reset the same file object before the
        # superseded-notice banner and the original contents are written back.
        with open(config.Paths().user_properties_path, 'r+') as f:
            legacy_properties = f.read()
            f.truncate(0)
            f.seek(0)
            f.write('# This properties file has been superseded by named\n'
                    '# configurations.  Editing it will have no effect.\n\n')
            f.write(legacy_properties)
    except IOError:
        # Best effort read and update of old properties file.
        pass

    # Only import the legacy properties if they were successfully read above.
    if legacy_properties is not None:
        logging.warn('Importing legacy user properties.')
        with open(GetPathForConfigName(AUTO_UPGRADE_NEW_CONFIG_NAME),
                  'w') as ff:
            ff.write(legacy_properties)

    ActivateNamedConfig(AUTO_UPGRADE_NEW_CONFIG_NAME)
Exemplo n.º 26
0
 def testGetCredentialsGcloudAuth(self):
     """Checks that the kubeconfig user entry uses the 'gcp' auth provider.

     With client certificates and application-default credentials both
     disabled, the generated kubeconfig should point at the installed gcloud
     binary ('gcloud.cmd' on Windows) running 'config config-helper'.
     """
     # Force the gcloud auth-provider path: no client cert, no ADC.
     properties.VALUES.container.use_client_certificate.Set(None)
     properties.VALUES.container.use_app_default_credentials.Set(False)
     c_config = self._TestGetCredentials(
         self._RunningClusterForVersion('1.5.0'))
     kubeconfig = kconfig.Kubeconfig.Default()
     # cmd-path must resolve to a real gcloud executable in the SDK bin dir.
     bin_name = 'gcloud'
     if platforms.OperatingSystem.IsWindows():
         bin_name = 'gcloud.cmd'
     path = os.path.join(core_config.Paths().sdk_bin_path, bin_name)
     self.assertTrue(os.path.isfile(path))
     # The token/expiry JSONPath keys select fields from the config-helper
     # JSON output.
     self.assertDictEqual(
         kubeconfig.users[c_config.kube_context]['user']['auth-provider'], {
             'name': 'gcp',
             'config': {
                 'cmd-path': path,
                 'cmd-args': 'config config-helper --format=json',
                 'token-key': '{.credential.access_token}',
                 'expiry-key': '{.credential.token_expiry}',
             }
         })
Exemplo n.º 27
0
def _IndexDirPath():
    """Locates the directory where the help search index should be stored.

    Raises:
      NoSdkRootException: if no SDK root is found.

    Returns:
      str, the path to the directory.
    """
    sdk_paths = config.Paths()
    if sdk_paths.sdk_root is None:
        raise NoSdkRootException(
            'No SDK root for this installation found. Help '
            'search index cannot be located.')
    # The index lives at <sdk_root>/.install/help_text.
    index_dir = os.path.join(sdk_paths.sdk_root, sdk_paths.CLOUDSDK_STATE_DIR,
                             'help_text')
    # Create the directory if it does not already exist.
    files.MakeDir(index_dir)
    return index_dir
Exemplo n.º 28
0
def main():
    """Launches gcutil."""
    project, account = bootstrapping.GetActiveProjectAndAccount()

    args = []
    if account:
        # GCE-provided service accounts authenticate via the metadata server;
        # any other account uses the legacy multistore credentials file.
        if account in c_gce.Metadata().Accounts():
            args.extend(['--auth_service_account', account])
        else:
            args.extend([
                '--credentials_file',
                config.Paths().LegacyCredentialsMultistorePath(account),
                '--auth_service_account=',
            ])
    if project:
        args.extend(['--project', project])
    args.append('--nocheck_for_new_version')

    bootstrapping.ExecutePythonTool('platform/gcutil', 'gcutil', *args)
Exemplo n.º 29
0
def Revoke(account=None):
    """Revoke credentials and clean up related files.

    Args:
      account: str, The account address for the credentials to be revoked. If
          None, the currently active account is used.

    Raises:
      NoActiveAccountException: If account is not provided and there is no
          active account.
      NoCredentialsForAccountException: If the provided account is not tied
          to any known credentials.
      RevokeError: If there was a more general problem revoking the account.
    """
    if not account:
        account = properties.VALUES.core.account.Get()
    if not account:
        raise NoActiveAccountException()

    # GCE-provided credentials come from the metadata server, not local
    # storage, so there is nothing here to revoke.
    if account in c_gce.Metadata().Accounts():
        raise RevokeError('Cannot revoke GCE-provided credentials.')

    creds = Load(account)
    if not creds:
        raise NoCredentialsForAccountException(account)

    if isinstance(creds, c_devshell.DevshellCredentials):
        # BUG FIX: the original concatenated string fragments without
        # separating spaces, rendering as "credential.This comes ...
        # outsideof your connected ...".
        raise RevokeError(
            'Cannot revoke the automatically provisioned Cloud Shell '
            'credential. This comes from your browser session and will not '
            'persist outside of your connected Cloud Shell session.')

    RevokeCredentials(creds)

    # Delete the stored credentials and any generated legacy credential files.
    store = _StorageForAccount(account)
    if store:
        store.delete()

    _GetLegacyGen(account, creds).Clean()
    files.RmTree(config.Paths().LegacyCredentialsDir(account))
Exemplo n.º 30
0
def _ActiveConfigNameFromFile():
    """Reads the name of the active named config from the activator file.

    Returns:
      str, The name of the active configuration or None.
    """
    activator_path = config.Paths().named_config_activator_path

    try:
        with open(activator_path, 'r') as activator_file:
            contents = activator_file.read()
    except (OSError, IOError) as exc:
        # A missing activator file just means no active named config.
        if exc.errno == errno.ENOENT:
            return None
        raise NamedConfigFileAccessError(
            'Active configuration name could not be read from: [{0}]'.
            format(activator_path), exc)

    # An empty file is treated the same as a missing one.
    return contents if contents else None