def ClearAllConfigurations(self):
    """Delete the named-config directory and the activator, if present.

    The activator may exist either as a regular file or (in older layouts)
    as a directory; whichever form is found is removed.
    """
    if os.path.exists(self.named_config_dir):
        files.RmTree(self.named_config_dir)
    activator = self.named_config_activator
    if os.path.isfile(activator):
        os.remove(activator)
    elif os.path.isdir(activator):
        files.RmTree(activator)
Beispiel #2
0
 def _Teardown(self):
     """Does the actual teardown. Deletes the tmpdir and the VM."""
     delete_cmd = [
         'compute', 'instances', 'delete', self._name, '--zone', self._zone,
         '--project', self._project, '-q'
     ]
     try:
         # Tell the user what is happening: this step can take a while and
         # otherwise looks like a hang. A progress tracker is avoided because
         # other output is being produced concurrently.
         log.status.Print('Beginning teardown of remote build environment '
                          '(this may take a few seconds).')
         # The --verbosity parameter is not threadsafe and this runs in
         # parallel; any verbosity options passed here could override the rest
         # of the command and mask errors. See b/22725326 for more context.
         self._cli.Execute(delete_cmd)
     except (SystemExit, exceptions.ToolException) as e:
         log.error('There was an error tearing down the remote build VM. '
                   'Please check that the VM was deleted.')
         log.file_only_logger.error('Teardown error: %s', e,
                                    exc_info=sys.exc_info())
     # Always remove the cert dir, even if the VM delete failed.
     files.RmTree(self.cert_dir)
Beispiel #3
0
    def RestoreBackup(self):
        """Restore the backup from this install state if it exists.

    If this installation has a backup stored in it (created by an update that
    used ReplaceWith(), above), it replaces this installation with the backup,
    using a temporary staging area.  This installation is moved to the trash
    directory under the installation that exists after this is done.  The trash
    directory can be removed at any point in the future.  We just don't want to
    delete code that is running since some platforms have a problem with that.

    Returns:
      bool, True if there was a backup to restore, False otherwise.
    """
        if not self.HasBackup():
            return False

        # Start with an empty staging area so MoveDir below cannot collide.
        if os.path.exists(self.__sdk_staging_root):
            file_utils.RmTree(self.__sdk_staging_root)

        # Move the backup into staging and treat it as an installation of its
        # own, with its trash emptied.
        file_utils.MoveDir(self.__backup_directory, self.__sdk_staging_root)
        staging_state = InstallationState(self.__sdk_staging_root)
        staging_state.ClearTrash()
        # pylint: disable=protected-access, This is an instance of InstallationState
        # Swap: the current (running) install goes into the staged install's
        # trash, then the staged backup takes the current install's place.
        # The order of these two moves is what makes the swap safe.
        file_utils.MoveDir(self.__sdk_root, staging_state.__trash_directory)
        file_utils.MoveDir(staging_state.__sdk_root, self.__sdk_root)
        return True
    def testRepoInfo(self):
        """gen-repo-info-file on a real git repo records the remote URL."""
        git_dir = os.path.join(self.test_dir, 'git_dir')

        try:
            # Build a minimal git repo: identity config, a remote pointing at
            # FAKE_URL, and one commit so that HEAD exists.
            subprocess.check_call(['git', 'init', git_dir])
            subprocess.check_call([
                'git', '-C', git_dir, 'config', 'user.email',
                '*****@*****.**'
            ])
            subprocess.check_call(
                ['git', '-C', git_dir, 'config', 'user.name', 'Dummy Name'])
            subprocess.check_call(
                ['git', '-C', git_dir, 'remote', 'add', 'origin', FAKE_URL])
            with open(os.path.join(git_dir, 'dummy.txt'), 'w') as f:
                f.write('hello world')
            subprocess.check_call(['git', '-C', git_dir, 'add', '-A'])
            subprocess.check_call(
                ['git', '-C', git_dir, 'commit', '-m', 'Dummy commit'])
            self.RunDebug([
                'source', 'gen-repo-info-file', '--source-directory', git_dir,
                '--output-directory', git_dir
            ])
            # The generated source context must carry the remote's URL.
            with open(os.path.join(git_dir, 'source-context.json'), 'r') as f:
                context = json.load(f)
                self.assertEqual(context.get('git', {}).get('url'), FAKE_URL)
        finally:
            # Removal of the git in the TearDown method is flaky. Unclear
            # why. It seems to work consistently here though.
            if os.path.exists(git_dir):
                files.RmTree(git_dir)
Beispiel #5
0
    def CreateStagingFromDownload(self, url):
        """Creates a new staging area from a fresh download of the Cloud SDK.

    Args:
      url: str, The url to download the new SDK from.

    Returns:
      An InstallationState object for the new install.

    Raises:
      installers.URLFetchError: If the new SDK could not be downloaded.
      InvalidDownloadError: If the new SDK was malformed.
    """
        # Start from a clean staging area.
        if os.path.exists(self.__sdk_staging_root):
            file_utils.RmTree(self.__sdk_staging_root)

        with file_utils.TemporaryDirectory() as scratch:
            download_dir = os.path.join(scratch, '.download')
            extract_dir = os.path.join(scratch, '.extract')
            installers.ComponentInstaller.DownloadAndExtractTar(
                url, download_dir, extract_dir)
            # A well-formed archive extracts to exactly one root directory.
            extracted = os.listdir(extract_dir)
            if len(extracted) != 1:
                raise InvalidDownloadError()
            file_utils.MoveDir(os.path.join(extract_dir, extracted[0]),
                               self.__sdk_staging_root)

        staging_sdk = InstallationState(self.__sdk_staging_root)
        self.CopyMachinePropertiesTo(staging_sdk)
        return staging_sdk
Beispiel #6
0
def Revoke(account=None):
  """Revoke credentials and clean up related files.

  Args:
    account: str, The account address for the credentials to be revoked. If
        None, the currently active account is used.

  Raises:
    NoCredentialsForAccountException: If the provided account is not tied to any
        known credentials.
    RevokeError: If there was a more general problem revoking the account.
  """
  account = account or properties.VALUES.core.account.Get()

  if account in c_gce.Metadata().Accounts():
    raise RevokeError('Cannot revoke GCE-provided credentials.')

  creds = Load(account)
  if not creds:
    raise NoCredentialsForAccountException(account)

  # TODO(user): Remove this condition when oauth2client does not crash while
  # revoking SignedJwtAssertionCredentials.
  revocable = (not client.HAS_CRYPTO or
               type(creds) != client.SignedJwtAssertionCredentials)
  if revocable:
    creds.revoke(_Http())

  store = _StorageForAccount(account)
  if store:
    store.delete()

  # Clean up legacy credential files generated for this account.
  _GetLegacyGen(account, creds).Clean()
  files.RmTree(config.Paths().LegacyCredentialsDir(account))
    def Run(self, args):
        """Create a virtual env, preferring `-m venv`, falling back to virtualenv.

        Raises:
          exceptions.ExitCodeNoError: if the platform is unsupported or any
              setup step fails; the exit code identifies the failing step.
        """
        # Virtual env support requires Python 3 and a non-Windows platform.
        if util.IsPy2() and not args.IsSpecified('python_to_use'):
            log.error('Virtual env support requires Python 3.')
            raise exceptions.ExitCodeNoError(exit_code=3)
        if util.IsWindows():
            log.error('Virtual env support not enabled on Windows.')
            raise exceptions.ExitCodeNoError(exit_code=4)
        if args.IsSpecified('python_to_use'):
            python = args.python_to_use
        else:
            try:
                python = execution_utils.GetPythonExecutable()
            except ValueError:
                log.error('Failed to resolve python to use for virtual env.')
                raise exceptions.ExitCodeNoError(exit_code=5)

        ve_dir = config.Paths().virtualenv_dir
        if util.VirtualEnvExists(ve_dir):
            log.error('Virtual env setup {} already exists.'.format(ve_dir))
            raise exceptions.ExitCodeNoError(exit_code=5)

        succeeded_making_venv = False
        try:
            log.status.Print('Creating virtualenv...')
            # python -m venv is preferred as it aligns the python used with
            # the currently in-use Python.
            ec = execution_utils.Exec([python, '-m', 'venv', ve_dir],
                                      no_exit=True,
                                      err_func=log.file_only_logger.debug,
                                      out_func=log.file_only_logger.debug)
            if ec != 0:
                # Many linux vendors have a history of shipping a broken
                # python-venv package that will not work correctly (debian, for
                # example). If -m venv failed above we attempt to use the
                # virtualenv tool if it is installed and exists in $PATH.
                ec = execution_utils.Exec(
                    ['virtualenv', '-q', '-p', python, ve_dir], no_exit=True)
                if ec != 0:
                    log.error('Virtual env setup failed.')
                    raise exceptions.ExitCodeNoError(exit_code=ec)
            log.status.Print('Installing modules...')
            install_modules = [
                '{}/bin/pip3'.format(ve_dir), 'install', '--log',
                '{}/install_module.log'.format(ve_dir), '-q',
                '--disable-pip-version-check'
            ]
            install_modules.extend(util.MODULES)
            ec = execution_utils.Exec(install_modules, no_exit=True)
            if ec == 0:
                # prevent the cleanup that occurs in finally block
                succeeded_making_venv = True
            else:
                log.error('Virtual env setup failed.')
                raise exceptions.ExitCodeNoError(exit_code=ec)
        finally:
            # If something went wrong we clean up any partially created ve_dir
            if not succeeded_making_venv:
                if util.VirtualEnvExists(ve_dir):
                    files.RmTree(ve_dir)
    def Uninstall(self, component_id, progress_callback=None):
        """Uninstalls the given component.

    Deletes all the files for this component and marks it as no longer being
    installed.

    Args:
      component_id: str, The id of the component to uninstall.
      progress_callback: f(float), A function to call with the fraction of
        completeness.
    """
        manifest = InstallationManifest(self._state_directory, component_id)
        paths = manifest.InstalledPaths()
        total_paths = len(paths)
        root = self.__sdk_root

        # Candidate directories to prune at the end (only if empty by then).
        dirs_to_remove = set()
        # __pycache__ dirs next to removed .py files; deleted wholesale below.
        pycache_dirs = set()
        for num, p in enumerate(paths, start=1):
            path = os.path.join(root, p)
            if os.path.isfile(path) or os.path.islink(path):
                os.remove(path)
                dir_path = os.path.dirname(os.path.normpath(p))
                if p.endswith('.py'):
                    # Python 2 processes leave behind .pyc files adjacent to the .py file;
                    # clean these up for any .py files being removed.
                    pyc_path = path + 'c'
                    if os.path.isfile(pyc_path):
                        os.remove(pyc_path)
                    # Python 3 processes leave behind __pycache__ folders in the .py
                    # file's directory; clean these up as well. Since the .pyc files
                    # within have different suffixes depending on the Python version, and
                    # the version of Python that compiled the file may differ from the
                    # current one running, it's faster to just delete the whole folder
                    # later instead of trying to match the file(s) here.
                    pycache_dirs.add(
                        os.path.join(root, dir_path, '__pycache__'))
                # Every ancestor of a removed file becomes a prune candidate.
                while dir_path:
                    dirs_to_remove.add(os.path.join(root, dir_path))
                    dir_path = os.path.dirname(dir_path)
            elif os.path.isdir(path):
                dirs_to_remove.add(os.path.normpath(path))
            if progress_callback:
                progress_callback(num / total_paths)

        for d in pycache_dirs:
            if os.path.isdir(d) and not os.path.islink(d):
                file_utils.RmTree(d)

        # Remove dirs from the bottom up.  Subdirs will always have a longer path
        # than it's parent.
        for d in sorted(dirs_to_remove, key=len, reverse=True):
            if os.path.isdir(
                    d) and not os.path.islink(d) and not os.listdir(d):
                os.rmdir(d)

        manifest.MarkUninstalled()
Beispiel #9
0
def Revoke(account=None):
    """Revoke credentials and clean up related files.

  Args:
    account: str, The account address for the credentials to be revoked. If
        None, the currently active account is used.

  Returns:
    'True' if this call revoked the account; 'False' if the account was already
    revoked.

  Raises:
    NoActiveAccountException: If account is not provided and there is no
        active account.
    NoCredentialsForAccountException: If the provided account is not tied to any
        known credentials.
    RevokeError: If there was a more general problem revoking the account.
  """
    account = account or properties.VALUES.core.account.Get()
    if not account:
        raise NoActiveAccountException()

    if account in c_gce.Metadata().Accounts():
        raise RevokeError('Cannot revoke GCE-provided credentials.')

    credentials = Load(account, prevent_refresh=True)
    if not credentials:
        raise NoCredentialsForAccountException(account)

    # Cloud Shell credentials are tied to the browser session, not to us.
    if isinstance(credentials, c_devshell.DevshellCredentials):
        raise RevokeError(
            'Cannot revoke the automatically provisioned Cloud Shell credential.'
            'This comes from your browser session and will not persist outside'
            'of your connected Cloud Shell session.')

    rv = False
    if not account.endswith('.gserviceaccount.com'):
        try:
            RevokeCredentials(credentials)
            rv = True
        except client.TokenRevokeError as e:
            # 'invalid_token': malformed or already revoked.
            # 'invalid_request': a service account token. Both are benign.
            if e.args[0] not in ('invalid_token', 'invalid_request'):
                raise

    store = creds.GetCredentialStore()
    store.Remove(account)

    _LegacyGenerator(account, credentials).Clean()
    files.RmTree(config.Paths().LegacyCredentialsDir(account))
    return rv
Beispiel #10
0
 def Purge(cls, cluster_name, zone_id, project_id):
     """Remove the cluster's cached config dir and its kubeconfig context."""
     config_dir = cls.GetConfigDir(cluster_name, zone_id, project_id)
     if os.path.exists(config_dir):
         file_utils.RmTree(config_dir)
     # purge from kubeconfig
     kc = kconfig.Kubeconfig.Default()
     kc.Clear(cls.KubeContext(cluster_name, zone_id, project_id))
     kc.SaveToFile()
     log.debug('Purged cluster config from %s', config_dir)
    def testCantWriteConfigurationsDirFail(self):
        """Creating a config fails when the config dir path is a plain file."""
        # Replace the configurations directory with a regular file so nothing
        # can be created underneath it.
        if os.path.exists(self.named_config_dir):
            files.RmTree(self.named_config_dir)
        with open(self.named_config_dir, 'w') as blocker:
            blocker.write('xxx')

        with self.assertRaisesRegex(named_configs.NamedConfigFileAccessError,
                                    r'Failed to create configuration \[foo\]'):
            self.Run('config configurations create foo')
Beispiel #12
0
 def Run(self, args):
     """Delete the virtualenv directory after confirming with the user."""
     ve_dir = config.Paths().virtualenv_dir
     if not util.VirtualEnvExists(ve_dir):
         log.status.Print('Virtual env does not exist at {}.'.format(ve_dir))
         raise exceptions.ExitCodeNoError(exit_code=1)
     # cancel_on_no aborts the command if the user declines.
     console_io.PromptContinue(
         message='Delete virtual env setup at {}'.format(ve_dir),
         cancel_on_no=True)
     files.RmTree(ve_dir)
Beispiel #13
0
    def _ClearStaging(self, progress_callback=None):
        """Deletes the current staging directory if it exists.

    Args:
      progress_callback: f(float), A function to call with the fraction of
        completeness.
    """
        staging_root = self.__sdk_staging_root
        if os.path.exists(staging_root):
            file_utils.RmTree(staging_root)
        # This step is a single unit of work; report it as fully complete.
        if progress_callback:
            progress_callback(1)
Beispiel #14
0
    def ClearTrash(self, progress_callback=None):
        """Deletes the current trash directory if it exists.

    Args:
      progress_callback: f(float), A function to call with the fraction of
        completeness.
    """
        trash = self.__trash_directory
        if os.path.isdir(trash):
            file_utils.RmTree(trash)
        # This step is a single unit of work; report it as fully complete.
        if progress_callback:
            progress_callback(1)
Beispiel #15
0
    def SetUp(self):
        super(WithRunApigee, self).SetUp()

        # Wipe out any ".apigee" config or cache state left over from previous
        # tests so that each test starts clean.
        config_dir = config.Paths().global_config_dir
        apigee_entries = (name for name in os.listdir(config_dir)
                          if name.startswith(".apigee"))
        for name in apigee_entries:
            full_path = os.path.join(config_dir, name)
            if os.path.isdir(full_path):
                files.RmTree(full_path)
            else:
                os.unlink(full_path)
        self.Run("config unset project")
Beispiel #16
0
    def testAutoUpgradeCommonCase(self):
        """Legacy global properties import once into a named 'default' config."""
        # Make sure there's a global user property to import
        if os.path.exists(config.Paths().named_config_activator_path):
            os.remove(config.Paths().named_config_activator_path)
        with open(os.path.join(self.global_config_path, 'properties'),
                  'w') as f:
            f.write('[container]\ncluster = my_cluster\n')

        # No named configurations exist yet.
        self.assertEqual((), tuple(self.Run('config configurations list')))

        self.Run('config set core/account mushroom')
        self.Run('config set core/project portobello')
        self.Run('config set compute/zone tree')
        self.Run('config set compute/region forest')

        self.assertEqual(
            self.Run('config list core/account')['core']['account'],
            'mushroom')

        # Setting properties auto-created the 'default' configuration, which
        # also picked up the legacy 'cluster' property.
        self.Run('config configurations list')
        self.AssertOutputContains(
            'default True mushroom portobello tree forest',
            normalize_space=True)

        self.Run('config configurations describe default')
        self.AssertOutputContains('name: default')
        self.AssertOutputContains('is_active: true')
        self.AssertOutputContains('account: mushroom', normalize_space=True)
        self.AssertOutputContains('cluster: my_cluster', normalize_space=True)

        # The legacy properties file is rewritten with a superseded notice and
        # no longer carries the migrated values.
        with open(os.path.join(self.global_config_path, 'properties')) as f:
            properties_contents = f.read()
            self.assertTrue('# This properties file has been superseded' in
                            properties_contents)
            self.assertFalse('mushroom' in properties_contents)

        # Make sure properties don't get imported again if we remove all configs.
        os.remove(config.Paths().named_config_activator_path)
        files.RmTree(config.Paths().named_config_directory)
        self.ClearOutput()
        self.Run('config set core/account mushroom')

        self.Run('config configurations describe default')
        self.AssertOutputContains('name: default')
        self.AssertOutputContains('is_active: true')
        self.AssertOutputContains('account: mushroom', normalize_space=True)
        self.AssertOutputNotContains('cluster: my_cluster',
                                     normalize_space=True)
    def testPersistProperty_UsesNamedConfiguration(self):
        """Persisting a property upgrades to the named-configuration store."""
        self.StartPropertyPatch(config.Paths,
                                'sdk_root',
                                return_value=self.temp_path)

        # Start from a state with no configurations at all.
        if os.path.exists(self.named_config_dir):
            files.RmTree(self.named_config_dir)
        if os.path.exists(self.named_config_activator):
            os.remove(self.named_config_activator)

        properties.PersistProperty(Prop('foo', 'bar'), 'magic_value')

        # The persist should have created and activated the 'default' config.
        self.assertEqual('default',
                         named_configs.ConfigurationStore.ActiveConfig().name)
def UploadSource(source_dir, bucket, obj, storage_client):
    """Upload a gzipped tarball of the source directory to GCS.

  Note: To provide parity with docker's behavior, we must respect .dockerignore.

  Args:
    source_dir: the directory to be archived.
    bucket: the GCS bucket where the tarball will be stored.
    obj: the GCS object where the tarball will be stored, in the above bucket.
    storage_client: An instance of the storage_v1.StorageV1 client.

  Raises:
    UploadFailedError: when the source fails to upload to GCS.
  """
    dockerignore = os.path.join(source_dir, '.dockerignore')
    exclude = None
    if os.path.exists(dockerignore):
        with open(dockerignore) as f:
            # Read the exclusions, filtering out blank lines.
            exclude = set(filter(bool, f.read().splitlines()))
            # Remove paths that shouldn't be excluded on the client.
            exclude -= set(BLACKLISTED_DOCKERIGNORE_PATHS)
    # We can't use tempfile.NamedTemporaryFile here because ... Windows.
    # See https://bugs.python.org/issue14243. There are small cleanup races
    # during process termination that will leave artifacts on the filesystem.
    # eg, CTRL-C on windows leaves both the directory and the file. Unavoidable.
    # On Posix, `kill -9` has similar behavior, but CTRL-C allows cleanup.
    #
    # BUGFIX: create the temp dir *before* entering the try block. Previously
    # mkdtemp ran inside the try, so a failure there raised NameError on
    # `temp_dir` in the finally clause, masking the real error.
    temp_dir = tempfile.mkdtemp()
    try:
        tarball_path = os.path.join(temp_dir, 'src.tgz')
        # We are able to leverage the source archiving code from docker-py;
        # however, there are two wrinkles:
        # 1) The 3P code doesn't support gzip (it's expecting a local unix socket).
        #    So we create a GzipFile object and let the 3P code write into that.
        # 2) The .seek(0) call at the end of the 3P code causes GzipFile to throw an
        #    exception. So we use GzipFileIgnoreSeek as a workaround.
        # The context manager guarantees the file handle is closed even if the
        # archiving step raises.
        with open(tarball_path, 'w+b') as f:
            with _GzipFileIgnoreSeek(mode='wb', fileobj=f) as gz:
                docker.utils.tar(source_dir, exclude, fileobj=gz)
        cloud_storage.CopyFileToGCS(bucket, tarball_path, obj, storage_client)
    finally:
        try:
            files.RmTree(temp_dir)
        except OSError:
            log.warn(
                'Could not remove temporary directory [{0}]'.format(temp_dir))
def ClearPyCache(root_dir=None):
    """Removes generic `__pycache__` folder and  '*.pyc' '*.pyo' files."""
    root_dir = root_dir or files.GetCWD()

    is_cleaned = False
    # Only the immediate children of root_dir are inspected; this does not
    # recurse into subdirectories.
    for entry in os.listdir(root_dir):
        path = os.path.join(root_dir, entry)
        if os.path.isdir(path):
            if entry == '__pycache__':
                files.RmTree(path)
                is_cleaned = True
        elif os.path.splitext(entry)[1] in ('.pyc', '.pyo'):
            os.remove(path)
            is_cleaned = True

    return is_cleaned
Beispiel #20
0
    def CloneToStaging(self):
        """Clones this state to the temporary staging area.

    This is used for making temporary copies of the entire Cloud SDK
    installation when doing updates.  The entire installation is cloned, but
    doing so removes any backups and trash from this state before doing the
    copy.

    Returns:
      An InstallationState object for the cloned install.
    """
        staging_root = self.__sdk_staging_root
        if os.path.exists(staging_root):
            file_utils.RmTree(staging_root)
        # Backups and trash are not part of a clone; drop them before copying.
        self.ClearBackup()
        self.ClearTrash()
        shutil.copytree(self.__sdk_root, staging_root, symlinks=True)
        return InstallationState(staging_root)
Beispiel #21
0
 def _Teardown(self):
     """Does the actual teardown. Deletes the tmpdir and the VM."""
     try:
         # The --verbosity parameter is not threadsafe and this runs in
         # parallel; any verbosity options passed here could override the rest
         # of the command and mask errors. See b/22725326 for more context.
         self._cli.Execute(['compute', 'instances', 'delete', self._name,
                            '--zone', self._zone, '-q'])
     except (SystemExit, exceptions.ToolException) as e:
         log.error('There was an error tearing down the remote build VM. '
                   'Please check that the VM was deleted.')
         log.file_only_logger.error(
             'Teardown error: %s', e, exc_info=sys.exc_info())
     # Always remove the cert dir, even if the VM delete failed.
     files.RmTree(self.cert_dir)
Beispiel #22
0
def Revoke(account=None):
    """Revoke credentials and clean up related files.

  Args:
    account: str, The account address for the credentials to be revoked. If
        None, the currently active account is used.

  Raises:
    NoActiveAccountException: If account is not provided and there is no
        active account.
    NoCredentialsForAccountException: If the provided account is not tied to any
        known credentials.
    RevokeError: If there was a more general problem revoking the account.
  """
    account = account or properties.VALUES.core.account.Get()
    if not account:
        raise NoActiveAccountException()

    if account in c_gce.Metadata().Accounts():
        raise RevokeError('Cannot revoke GCE-provided credentials.')

    creds = Load(account)
    if not creds:
        raise NoCredentialsForAccountException(account)

    # Cloud Shell credentials are tied to the browser session, not to us.
    if isinstance(creds, c_devshell.DevshellCredentials):
        raise RevokeError(
            'Cannot revoke the automatically provisioned Cloud Shell credential.'
            'This comes from your browser session and will not persist outside'
            'of your connected Cloud Shell session.')

    RevokeCredentials(creds)

    store = _StorageForAccount(account)
    if store:
        store.delete()

    # Clean up legacy credential files generated for this account.
    _GetLegacyGen(account, creds).Clean()
    files.RmTree(config.Paths().LegacyCredentialsDir(account))
Beispiel #23
0
    def testDeployPythonWithGit(self):
        """Deploying a git-tracked app uploads generated source contexts."""
        app_root = os.path.join(self.test_dir, 'app_engine_python_with_git')
        git_dir = os.path.join(app_root, '.git')

        try:
            with gsutil_e2e_utils.ModifiedGsutilStateDir(self.Account()):
                # Turn the app directory into a git repo with one commit and a
                # remote, so that source context generation has data to use.
                subprocess.check_call(['git', 'init', app_root])
                subprocess.check_call([
                    'git', '-C', app_root, 'config', 'user.email',
                    '*****@*****.**'
                ])
                subprocess.check_call([
                    'git', '-C', app_root, 'config', 'user.name', 'Dummy Name'
                ])
                subprocess.check_call([
                    'git', '-C', app_root, 'remote', 'add', 'origin',
                    'https://github.com/NoSuchProject__/dummy.git'
                ])
                subprocess.check_call(['git', '-C', app_root, 'add', '-A'])
                subprocess.check_call(
                    ['git', '-C', app_root, 'commit', '-m', 'Dummy commit'])

                self.assertFalse(
                    glob.glob(os.path.join(app_root, 'source-cont*.json')))
                result = self._deployStandardApp(
                    'app_engine_python_with_git')[0]
                # Verify that the upload included generated source contexts.
                self.assertTrue('source-context.json' in result.stderr)

                # Ensure that the test didn't create any source context files in the
                # source directory.
                self.assertFalse(
                    glob.glob(os.path.join(app_root, 'source-cont*.json')))
        finally:
            # Removal of the git in the TearDown method is flaky. Unclear
            # why. It seems to work consistently here though.
            if os.path.exists(git_dir):
                files.RmTree(git_dir)
Beispiel #24
0
def Revoke(account=None, use_google_auth=False):
    """Revoke credentials and clean up related files.

  Args:
    account: str, The account address for the credentials to be revoked. If
        None, the currently active account is used.
    use_google_auth: bool, True to revoke the credentials as google auth
        credentials. False to revoke the credentials as oauth2client
        credentials.

  Returns:
    True if this call revoked the account; False if the account was already
    revoked.

  Raises:
    NoActiveAccountException: If account is not provided and there is no
        active account.
    NoCredentialsForAccountException: If the provided account is not tied to any
        known credentials.
    RevokeError: If there was a more general problem revoking the account.
  """
    # Import only when necessary to decrease the startup time. Move it to
    # global once google-auth is ready to replace oauth2client.
    # pylint: disable=g-import-not-at-top
    from googlecloudsdk.core.credentials import google_auth_credentials as c_google_auth
    # pylint: enable=g-import-not-at-top
    account = account or properties.VALUES.core.account.Get()
    if not account:
        raise NoActiveAccountException()

    if account in c_gce.Metadata().Accounts():
        raise RevokeError('Cannot revoke GCE-provided credentials.')

    credentials = Load(account,
                       prevent_refresh=True,
                       use_google_auth=use_google_auth)
    if not credentials:
        raise NoCredentialsForAccountException(account)

    # Cloud Shell credentials are tied to the browser session, not to us.
    if isinstance(credentials, (c_devshell.DevshellCredentials,
                                c_devshell.DevShellCredentialsGoogleAuth)):
        raise RevokeError(
            'Cannot revoke the automatically provisioned Cloud Shell credential.'
            'This comes from your browser session and will not persist outside'
            'of your connected Cloud Shell session.')

    rv = False
    if not account.endswith('.gserviceaccount.com'):
        try:
            RevokeCredentials(credentials)
            rv = True
        except (client.TokenRevokeError, c_google_auth.TokenRevokeError) as e:
            # 'invalid_token': malformed or already revoked.
            # 'invalid_request': a service account token. Both are benign.
            if e.args[0] not in ('invalid_token', 'invalid_request'):
                raise

    store = c_creds.GetCredentialStore()
    store.Remove(account)

    _LegacyGenerator(account, credentials).Clean()
    legacy_creds_dir = config.Paths().LegacyCredentialsDir(account)
    if os.path.isdir(legacy_creds_dir):
        files.RmTree(legacy_creds_dir)
    return rv
Beispiel #25
0
    def Run(self, args):
        """Create the .gcloud folder, if possible.

    Args:
      args: argparse.Namespace, the arguments this command is run with.

    Raises:
      ToolException: on project initialization errors.

    Returns:
      The path to the new gcloud workspace.
    """
        # Ensure that we're logged in.
        creds = c_store.Load()

        is_new_directory = False

        try:
            workspace = workspaces.FromCWD()
            # Cannot re-init when in a workspace.
            current_project = workspace.GetProperty(
                properties.VALUES.core.project)
            if current_project != args.project:
                message = (
                    'Directory [{root_directory}] is already initialized to project'
                    ' [{project}].').format(
                        root_directory=workspace.root_directory,
                        project=current_project)
            else:
                message = (
                    'Directory [{root_directory}] is already initialized.'
                ).format(root_directory=workspace.root_directory)
            raise c_exc.ToolException(message)
        except workspaces.NoContainingWorkspaceException:
            # Not inside an existing workspace: create a new directory named
            # after the project; it must be empty or absent.
            workspace_dir = os.path.join(os.getcwd(), args.project)
            message = ('Directory [{root_directory}] is not empty.').format(
                root_directory=workspace_dir)
            if os.path.exists(workspace_dir) and os.listdir(workspace_dir):
                raise c_exc.ToolException(message)
            else:
                files.MakeDir(workspace_dir)
                is_new_directory = True
                workspace = workspaces.Create(workspace_dir)

        workspace.SetProperty(properties.VALUES.core.project, args.project)
        if args.devshell_image:
            workspace.SetProperty(properties.VALUES.devshell.image,
                                  args.devshell_image)

        # Everything that can fail should happen within this next try: block.
        # If something fails, and the result is an empty directory that we just
        # created, we clean it up.
        try:
            source_client = source_v0.SourceV0(credentials=creds)
            try:
                response = source_client.repos.List(
                    source_v0.SourceReposListRequest(projectId=args.project))
            except apitools_base.HttpError:
                # Source API is down! Let's guess the repo.
                log.status.write(
                    textwrap.dedent("""\
            Unable to fetch repository URL. Guessing the URL, but if your
            project uses repo-sync then the cloned repository may be read-only.
            """))
                try:
                    workspace.CloneProjectRepository(
                        args.project, workspaces.DEFAULT_REPOSITORY_ALIAS)
                except workspaces.CannotFetchRepositoryException as e:
                    log.error(e)
            else:
                for repo in response.repos:
                    try:
                        workspace.CloneProjectRepository(
                            args.project, repo.repoName, repo.cloneUrl)
                    except workspaces.CannotFetchRepositoryException as e:
                        log.error(e)
        finally:
            cleared_files = False
            if is_new_directory:
                # If cloning left the new directory effectively empty (nothing
                # but the workspace config dir), treat initialization as
                # failed and remove the directory we created above.
                dir_files = os.listdir(workspace_dir)
                if not dir_files or dir_files == [
                        config.Paths().CLOUDSDK_WORKSPACE_CONFIG_DIR_NAME
                ]:
                    log.error((
                        'Unable to initialize project [{project}], cleaning up'
                        ' [{path}].').format(project=args.project,
                                             path=workspace_dir))
                    files.RmTree(workspace_dir)
                    cleared_files = True
        if cleared_files:
            raise c_exc.ToolException(
                'Unable to initialize project [{project}].'.format(
                    project=args.project))
        log.status.write(
            'Project [{prj}] was initialized in [{path}].\n'.format(
                path=workspace.root_directory, prj=args.project))

        return workspace
Beispiel #26
0
 def _RmPath(self, path):
     if os.path.isfile(path):
         os.unlink(path)
     elif os.path.isdir(path):
         file_utils.RmTree(path)
 def ResetCache():
     """Delete the on-disk completion cache directory, if present."""
     cache_path = config.Paths().completion_cache_dir
     if os.path.isdir(cache_path):
         files.RmTree(cache_path)
Beispiel #28
0
 def Delete(self):
     """Permanently deletes the persistent cache."""
     # Discard any pending changes before removing the backing store.
     self.Close(commit=False)
     if self._persistent:
         files.RmTree(self.name)
         # Mark as gone so a repeated Delete() is a no-op.
         self._persistent = False
def DeleteDeprecatedCache():
  """Silently deletes the deprecated resource completion cache if it exists."""
  deprecated_dir = config.Paths().completion_cache_dir
  # Only attempt removal when the directory actually exists; otherwise this
  # is a silent no-op.
  if os.path.isdir(deprecated_dir):
    files.RmTree(deprecated_dir)
    def testCompilePython(self):
        """Checks that CompilePythonFiles byte-compiles exactly the right files.

        Only .py files under bin/bootstrapping, lib, and platform should be
        compiled; files in the SDK root, directly under bin, in unrelated
        directories, or containing invalid Python must be skipped (and must
        not crash the compile step).
        """
        self.SetEncoding('utf8')
        py_file_contents = 'a = 1 + 1'
        # Files expected to be byte-compiled by CompilePythonFiles().
        to_compile = [
            os.path.join('bin', 'bootstrapping', 'foo.py'),
            os.path.join('bin', 'bootstrapping', 'bar', 'foo.py'),
            os.path.join('lib', 'foo.py'),
            os.path.join('lib', 'bar', 'foo.py'),
            os.path.join('platform', 'foo.py'),
            os.path.join('platform', 'bar', 'foo.py'),
        ]
        no_compile = [
            # Not python.
            'a',
            # Don't compile things in the root.
            'b.py',
            # Don't compile things directly in bin.
            os.path.join('bin', 'c.py'),
            # Some other random directory.
            os.path.join('notincluded', 'd.py'),
            # This file will have invalid contents.
            'junk.py'
        ]
        for f in to_compile:
            self.Touch(self.sdk_root_path, f, py_file_contents, makedirs=True)
        for f in no_compile:
            self.Touch(self.sdk_root_path, f, py_file_contents, makedirs=True)
        # Overwrite junk.py with syntactically invalid Python so compiling it
        # would fail if it were (incorrectly) attempted.
        self.Touch(self.sdk_root_path, 'junk.py', ':')
        self.SetEncoding('ascii')

        install_state = local_state.InstallationState(self.sdk_root_path)
        install_state.CompilePythonFiles()

        # Depending on the Python version, compiled files might be located in the
        # same dir or in the location specified by PEP-3147.
        def _FileMatchesInDir(dirname, regex):
            # Walks the whole subtree so both legacy foo.pyc and PEP-3147
            # __pycache__/foo.cpython-XY.pyc layouts are found.
            for _, _, filenames in os.walk(six.text_type(dirname)):
                for filename in filenames:
                    if re.match(regex, filename):
                        return True
            return False

        for f in to_compile:
            d, basename = os.path.split(os.path.join(self.sdk_root_path, f))
            file_name, extension = basename.split('.', 1)
            # Matches both "foo.pyc" and "foo.cpython-XY.pyc" style names.
            regex = '{0}.(.*){1}c'.format(file_name, re.escape(extension))
            self.assertTrue(_FileMatchesInDir(d, regex))
        for f in no_compile:
            if f.endswith('.py'):
                d, basename = os.path.split(os.path.join(
                    self.sdk_root_path, f))
                file_name, extension = basename.split('.', 1)
                regex = '{0}.(.*){1}c'.format(file_name, re.escape(extension))
                self.assertFalse(_FileMatchesInDir(d, regex))
            else:
                # Non-.py files: assert no "<name>...pyc" artifact appeared.
                d, basename = os.path.split(os.path.join(
                    self.sdk_root_path, f))
                regex = '{0}(.*).pyc'.format(basename)
                self.assertFalse(_FileMatchesInDir(d, regex))

        # Ensure this doesn't crash when one of the directories is missing
        files.RmTree(os.path.join(self.sdk_root_path, 'platform'))
        install_state.CompilePythonFiles()