def _GetToolEnv(env=None):
  """Build the environment dict that a subprocess should run with.

  Args:
    env: {str: str}, An existing environment to augment. If None, the current
      process environment is copied and used as the base for the subprocess.

  Returns:
    The augmented environment dict.
  """
  tool_env = encoding.EncodeEnv(dict(os.environ) if env is None else env)
  encoding.SetEncodedValue(tool_env, 'CLOUDSDK_WRAPPER', '1')
  # Flags can set properties which override the properties file and the
  # existing env vars. Propagate every property through the environment so
  # child processes use the same effective settings.
  for section in properties.VALUES:
    for prop in section:
      encoding.SetEncodedValue(tool_env, prop.EnvironmentName(),
                               prop.Get(required=False, validate=False))
  # The active configuration is not a real property (although it behaves
  # like one), so it must be forwarded separately.
  encoding.SetEncodedValue(
      tool_env, config.CLOUDSDK_ACTIVE_CONFIG_NAME,
      named_configs.ConfigurationStore.ActiveConfig().name)
  return tool_env
def _GetToolEnv():
  """Return a copy of os.environ annotated with Cloud SDK wrapper metadata."""
  env = dict(os.environ)
  # Mark the environment so child tools know they run under the wrapper,
  # which SDK version launched them, and which Python to use.
  wrapper_vars = (
      ('CLOUDSDK_WRAPPER', '1'),
      ('CLOUDSDK_VERSION', config.CLOUD_SDK_VERSION),
      ('CLOUDSDK_PYTHON', execution_utils.GetPythonExecutable()),
  )
  for name, value in wrapper_vars:
    encoding.SetEncodedValue(env, name, value)
  return env
def _GetGKEKubeconfig(api_adapter, project, location_id, cluster_id,
                      temp_kubeconfig_dir, internal_ip,
                      cross_connect_subnetwork, private_endpoint_fqdn):
  """Fetch the kubeconfig of a GKE cluster using the GKE APIs.

  The 'KUBECONFIG' value in `os.environ` is temporarily pointed at the
  temporary kubeconfig path, so subprocesses started with
  googlecloudsdk.core.execution_utils.Exec see the temporary KUBECONFIG
  environment variable. The cluster is validated through the GKE API and its
  ClusterConfig object is persisted into that kubeconfig.

  Args:
    api_adapter: the GKE api adapter used for running kubernetes commands
    project: string, the project id of the cluster for which kube config is
      to be fetched
    location_id: string, the id of the location to which the cluster belongs
    cluster_id: string, the id of the cluster
    temp_kubeconfig_dir: TemporaryDirectory object
    internal_ip: whether to persist the internal IP of the endpoint.
    cross_connect_subnetwork: full path of the cross connect subnet whose
      endpoint to persist (optional)
    private_endpoint_fqdn: whether to persist the private fqdn.

  Raises:
    Error: If unable to get credentials for kubernetes cluster.

  Returns:
    the path to the kubeconfig file
  """
  kubeconfig_path = os.path.join(temp_kubeconfig_dir.path, 'kubeconfig')
  saved_kubeconfig = encoding.GetEncodedValue(os.environ, 'KUBECONFIG')
  try:
    encoding.SetEncodedValue(os.environ, 'KUBECONFIG', kubeconfig_path)
    adapter = api_adapter or gke_api_adapter.NewAPIAdapter('v1')
    cluster_ref = adapter.ParseCluster(cluster_id, location_id, project)
    cluster = adapter.GetCluster(cluster_ref)
    auth = cluster.masterAuth
    has_client_creds = bool(auth and auth.clientCertificate and auth.clientKey)
    # c_util.ClusterConfig.UseGCPAuthProvider() checks for
    # container/use_client_certificate setting
    if not has_client_creds and not c_util.ClusterConfig.UseGCPAuthProvider():
      raise c_util.Error(
          'Unable to get cluster credentials. User must have edit '
          'permission on {}'.format(cluster_ref.projectId))
    c_util.ClusterConfig.Persist(cluster, cluster_ref.projectId, internal_ip,
                                 cross_connect_subnetwork,
                                 private_endpoint_fqdn)
  finally:
    # Restore the caller's KUBECONFIG; drop the key if it was not set.
    if saved_kubeconfig:
      encoding.SetEncodedValue(os.environ, 'KUBECONFIG', saved_kubeconfig)
    else:
      del os.environ['KUBECONFIG']
  return kubeconfig_path
def SetUp(self):
  """Simulate a platform with no terminal-size helper modules available."""
  self.imports = ImportMocker()
  # Make every size-probing import fail so the env-var fallback is used.
  for module_name in ('ctypes', 'fcntl', 'termios'):
    self.imports.SetImport(module_name, None)
  self.StartEnvPatch({})
  encoding.SetEncodedValue(os.environ, 'COLUMNS', '83')
  encoding.SetEncodedValue(os.environ, 'LINES', '27')
  # The stty/tput subprocess probe must fail too.
  self.check_output = self.StartObjectPatch(subprocess, 'check_output')
  self.check_output.side_effect = OSError
def testDeployGoUsingApi(self):
  """Deploy a standard Go app.

  Ensures that the staging step is invoked, since a dependency will
  intentionally be placed outside the app directory -- in the GOPATH. The
  test is designed to fail if the staging step/vendoring fails.
  """
  gopath = os.path.join(self.test_dir, 'gopath')
  encoding.SetEncodedValue(os.environ, 'GOPATH', gopath)
  try:
    self._deployStandardApp('app_engine_go111_data')
  finally:
    # Setting the value to None removes GOPATH from the environment.
    encoding.SetEncodedValue(os.environ, 'GOPATH', None)
def testLoad(self):
  """Load() should combine installation properties with the active config."""
  self.ClearAllConfigurations()
  named_configs.ConfigurationStore.CreateConfig('foo')
  named_configs.ConfigurationStore.ActivateConfig('foo')
  load_mock = self.StartObjectPatch(properties_file, 'PropertiesFile')
  named_configs.ActivePropertiesFile.Invalidate()
  named_configs.ActivePropertiesFile.Load()
  # With 'foo' active, the file list is installation props + config_foo.
  load_mock.assert_called_with([
      config.Paths().installation_properties_path,
      os.path.join(self.named_config_dir, 'config_foo')
  ])
  # A second Load without invalidation reports the same call arguments.
  named_configs.ActivePropertiesFile.Load()
  load_mock.assert_called_with([
      config.Paths().installation_properties_path,
      os.path.join(self.named_config_dir, 'config_foo')
  ])
  named_configs.ActivePropertiesFile.Invalidate()
  # The special 'NONE' configuration contributes no per-config file.
  named_configs.ConfigurationStore.ActivateConfig('NONE')
  load_mock.reset_mock()
  named_configs.ActivePropertiesFile.Load()
  load_mock.assert_called_with(
      [config.Paths().installation_properties_path, None])
  named_configs.ActivePropertiesFile.Invalidate()
  # The CLOUDSDK_ACTIVE_CONFIG_NAME env var overrides the activator file.
  encoding.SetEncodedValue(os.environ, 'CLOUDSDK_ACTIVE_CONFIG_NAME', 'other')
  load_mock.reset_mock()
  named_configs.ActivePropertiesFile.Load()
  load_mock.assert_called_with([
      config.Paths().installation_properties_path,
      os.path.join(self.named_config_dir, 'config_other')
  ])
def testExternalPagerEnviron(self):
  """A PAGER value from the environment is used verbatim."""
  pager_cmd = 'more'
  encoding.SetEncodedValue(os.environ, 'PAGER', pager_cmd)
  console_io.More(self.contents)
  # The external pager is launched; the internal pager must not run.
  self.popen.assert_called_once_with(
      pager_cmd, stdin=subprocess.PIPE, shell=True)
  self.assertFalse(self.ran)
def TemporaryKubeconfig(location_id, cluster_id):
  """Context manager providing a throwaway kubeconfig for one GKE cluster.

  The kubeconfig file is automatically created and destroyed and contains
  only the credentials for the specified GKE cluster. The 'KUBECONFIG' value
  in `os.environ` is temporarily replaced with the temporary kubeconfig's
  path, so subprocesses started with
  googlecloudsdk.core.execution_utils.Exec while inside this context manager
  see the temporary KUBECONFIG environment variable.

  Args:
    location_id: string, the id of the location to which the cluster belongs
    cluster_id: string, the id of the cluster

  Raises:
    Error: If unable to get credentials for kubernetes cluster.

  Returns:
    the path to the temporary kubeconfig file

  Yields:
    Due to b/73533917, linter crashes without yields.
  """
  gke_util.CheckKubectlInstalled()
  with files.TemporaryDirectory() as tempdir:
    kubeconfig = os.path.join(tempdir, 'kubeconfig')
    saved_kubeconfig = encoding.GetEncodedValue(os.environ,
                                                KUBECONFIG_ENV_VAR_NAME)
    try:
      encoding.SetEncodedValue(os.environ, KUBECONFIG_ENV_VAR_NAME,
                               kubeconfig)
      gke_api = gke_api_adapter.NewAPIAdapter(GKE_API_VERSION)
      cluster_ref = gke_api.ParseCluster(cluster_id, location_id)
      cluster = gke_api.GetCluster(cluster_ref)
      auth = cluster.masterAuth
      has_creds = bool(auth and auth.clientCertificate and auth.clientKey)
      if not has_creds and not gke_util.ClusterConfig.UseGCPAuthProvider():
        raise Error('Unable to get cluster credentials. User must have edit '
                    'permission on {}'.format(cluster_ref.projectId))
      gke_util.ClusterConfig.Persist(cluster, cluster_ref.projectId)
      yield kubeconfig
    finally:
      # SetEncodedValue removes the variable when the saved value is None.
      encoding.SetEncodedValue(os.environ, KUBECONFIG_ENV_VAR_NAME,
                               saved_kubeconfig)
def testWorkflow(self):
  """End-to-end create/activate/delete workflow for named configurations."""
  self.ClearAllConfigurations()
  self.AssertExistingConfigs('default', 'default')
  named_configs.ConfigurationStore.CreateConfig('foo')
  self.AssertExistingConfigs('default', 'default', 'foo')
  # Creating a config that already exists is an error.
  with self.assertRaises(named_configs.NamedConfigError):
    named_configs.ConfigurationStore.CreateConfig('foo')
  self.AssertExistingConfigs('default', 'default', 'foo')
  named_configs.ConfigurationStore.CreateConfig('bar')
  self.AssertExistingConfigs('default', 'default', 'foo', 'bar')
  named_configs.ConfigurationStore.ActivateConfig('foo')
  self.AssertExistingConfigs('foo', 'default', 'foo', 'bar')
  # Activating a nonexistent config is an error.
  with self.assertRaises(named_configs.NamedConfigError):
    named_configs.ConfigurationStore.ActivateConfig('baz')
  self.AssertExistingConfigs('foo', 'default', 'foo', 'bar')
  # The currently active config cannot be deleted.
  with self.assertRaises(named_configs.NamedConfigError):
    named_configs.ConfigurationStore.DeleteConfig('foo')
  self.AssertExistingConfigs('foo', 'default', 'foo', 'bar')
  # The env var override changes which config counts as active.
  encoding.SetEncodedValue(os.environ, 'CLOUDSDK_ACTIVE_CONFIG_NAME', 'bar')
  with self.assertRaisesRegex(named_configs.NamedConfigError,
                              'currently active'):
    named_configs.ConfigurationStore.DeleteConfig('bar')
  # 'foo' is still recorded as active in the gcloud properties activator.
  with self.assertRaisesRegex(named_configs.NamedConfigError,
                              'gcloud properties'):
    named_configs.ConfigurationStore.DeleteConfig('foo')
  self.AssertExistingConfigs('bar', 'default', 'foo', 'bar')
  # Clearing the env override makes 'bar' deletable again.
  encoding.SetEncodedValue(os.environ, 'CLOUDSDK_ACTIVE_CONFIG_NAME', None)
  named_configs.ConfigurationStore.DeleteConfig('bar')
  self.AssertExistingConfigs('foo', 'default', 'foo')
  # Deleting a missing config is an error.
  with self.assertRaisesRegex(named_configs.NamedConfigError,
                              'does not exist'):
    named_configs.ConfigurationStore.DeleteConfig('bar')
  named_configs.ConfigurationStore.ActivateConfig('default')
  self.AssertExistingConfigs('default', 'default', 'foo')
  named_configs.ConfigurationStore.DeleteConfig('foo')
def testExternalPagerDefaultWithLess(self):
  """A preexisting LESS value gets '-R' prepended for raw ANSI output."""
  encoding.SetEncodedValue(os.environ, 'LESS', '-Z')
  console_io.More(self.contents)
  self.popen.assert_called_once_with('less', stdin=subprocess.PIPE,
                                     shell=True)
  self.assertFalse(self.ran)
  # More() prepends '-R' to the user's existing LESS options.
  self.assertEqual('-R-Z', self.less_env)
def More(contents, out=None, prompt=None, check_pager=True):
  """Run a user specified pager or fall back to the internal pager.

  Args:
    contents: The entire contents of the text lines to page.
    out: The output stream, log.out (effectively) if None.
    prompt: The page break prompt.
    check_pager: Checks the PAGER env var and uses it if True.
  """
  if not IsInteractive(output=True):
    if not out:
      out = log.out
    out.write(contents)
    return
  if not out:
    # Rendered help to the log file.
    log.file_only_logger.info(contents)
    # Paging shenanigans to stdout.
    out = sys.stdout
  if check_pager:
    pager = encoding.GetEncodedValue(os.environ, 'PAGER', None)
    if pager == '-':
      # Use the fallback Pager.
      pager = None
    elif not pager:
      # Search for a pager that handles ANSI escapes.
      for command in ('less', 'pager'):
        if files.FindExecutableOnPath(command):
          pager = command
          break
    if pager:
      # If the pager is less(1) then instruct it to display raw ANSI escape
      # sequences to enable colors and font embellishments.
      less_orig = encoding.GetEncodedValue(os.environ, 'LESS', None)
      less = '-R' + (less_orig or '')
      encoding.SetEncodedValue(os.environ, 'LESS', less)
      try:
        p = subprocess.Popen(pager, stdin=subprocess.PIPE, shell=True)
        enc = console_attr.GetConsoleAttr().GetEncoding()
        p.communicate(input=contents.encode(enc))
        p.wait()
      finally:
        # BUG FIX: restore the caller's original LESS value instead of only
        # removing it when it was previously unset; the old code leaked the
        # '-R'-prefixed value into the environment whenever LESS was already
        # set. SetEncodedValue deletes the variable when the value is None.
        encoding.SetEncodedValue(os.environ, 'LESS', less_orig)
      return
  # Fall back to the internal pager.
  console_pager.Pager(contents, out, prompt).Run()
def _GetGKEKubeconfig(location_id, cluster_id, temp_kubeconfig_dir):
  """Fetch the kubeconfig of a GKE cluster using the GKE APIs.

  The 'KUBECONFIG' value in `os.environ` is temporarily pointed at the
  temporary kubeconfig path, so subprocesses started with
  googlecloudsdk.core.execution_utils.Exec see the temporary KUBECONFIG
  environment variable. The cluster is validated through the GKE API and its
  ClusterConfig object is persisted into that kubeconfig.

  Args:
    location_id: string, the id of the location to which the cluster belongs
    cluster_id: string, the id of the cluster
    temp_kubeconfig_dir: TemporaryDirectory object

  Raises:
    Error: If unable to get credentials for kubernetes cluster.

  Returns:
    the path to the kubeconfig file
  """
  kubeconfig_path = os.path.join(temp_kubeconfig_dir.path, 'kubeconfig')
  saved_kubeconfig = encoding.GetEncodedValue(os.environ, 'KUBECONFIG')
  try:
    encoding.SetEncodedValue(os.environ, 'KUBECONFIG', kubeconfig_path)
    gke_api = gke_api_adapter.NewAPIAdapter('v1')
    cluster_ref = gke_api.ParseCluster(cluster_id, location_id)
    cluster = gke_api.GetCluster(cluster_ref)
    auth = cluster.masterAuth
    has_client_creds = bool(auth and auth.clientCertificate and
                            auth.clientKey)
    # c_util.ClusterConfig.UseGCPAuthProvider() checks for
    # container/use_client_certificate setting
    if not has_client_creds and not c_util.ClusterConfig.UseGCPAuthProvider():
      raise c_util.Error(
          'Unable to get cluster credentials. User must have edit '
          'permission on {}'.format(cluster_ref.projectId))
    c_util.ClusterConfig.Persist(cluster, cluster_ref.projectId)
  finally:
    # Restore the caller's KUBECONFIG; drop the key if it was not set.
    if saved_kubeconfig:
      encoding.SetEncodedValue(os.environ, 'KUBECONFIG', saved_kubeconfig)
    else:
      del os.environ['KUBECONFIG']
  return kubeconfig_path
def RunScenario(self, scenario):
  """Runs `gcloud meta test <scenario>` under a pty shell.

  Args:
    scenario: str, the meta test scenario name to run.
  """
  self.ptyRequired()
  # First get the path of the python gcloud main module.
  # This could be different in unit and bundled tests.
  sdk_path = googlecloudsdk.__file__
  for _ in range(2):
    sdk_path = os.path.dirname(sdk_path)
    gcloud = os.path.join(sdk_path, 'gcloud.py')
    if os.path.exists(gcloud):
      break
  # Make sure the gcloud main imports are visible.
  env = os.environ.copy()
  python_path = encoding.GetEncodedValue(env, 'PYTHONPATH')
  python_path = os.pathsep.join(([python_path] if python_path else []) +
                                [sdk_path])
  encoding.SetEncodedValue(env, 'PYTHONPATH', python_path)
  # Prevent gcloud wrapper script fallback to a python different from the one
  # running this test.
  encoding.SetEncodedValue(env, 'CLOUDSDK_PYTHON', sys.executable)
  # Subprocess stderr=X redirection requires file streams, not buffers.
  # stderr=subprocess.PIPE only works reliably with p=subprocess.Popen() and
  # p.communicate(), but that messes up meta test signal delivery by
  # absorbing it -- we would never see it here.
  stderr_path = os.path.join(self.temp_path, 'stderr')
  try:
    # BUG FIX: use `with` so the stream is always closed and the cleanup
    # below never references an unbound name if io.open() itself fails.
    with io.open(stderr_path, 'w') as stderr:
      command_args = ['/bin/bash', self.ptyshell, scenario]
      subprocess.check_call(command_args, stderr=stderr, env=env)
  finally:
    if os.path.exists(stderr_path):
      # Write the subprocess stderr to the buffer stream used by the
      # WithOutputCapture mixin.
      with io.open(stderr_path, 'r') as stderr:
        sys.stderr.write(stderr.read())
      os.remove(stderr_path)
def SetUp(self):
  """Stub out pager discovery, interactivity, Popen, and the environment."""
  self.StartEnvPatch({})
  # Start from a clean slate: no pager-related env vars.
  for var in ('LESS', 'PAGER'):
    encoding.SetEncodedValue(os.environ, var, None)
  self.StartObjectPatch(
      files,
      'FindExecutableOnPath').side_effect = self.MockFindExecutableOnPath
  self.StartObjectPatch(console_io, 'IsInteractive',
                        side_effect=self.MockIsInteractive)
  self.StartObjectPatch(console_pager.Pager, 'Run', side_effect=self.MockRun)
  self.popen = self.StartObjectPatch(subprocess, 'Popen',
                                     side_effect=self.MockPopen)
  # Flags recording what the mocks observed during each test.
  self.executables = True
  self.interactive = True
  self.less_env = None
  self.ran = False
  self.raw_chars = None
  self.contents = 'Here\nlies\nLes\nMoore.\nNo\nLes\nno\nmore.'
def testSetEncodedValueUnicode(self):
  """Round-trip a non-ASCII value through Set/GetEncodedValue."""
  self.StartObjectPatch(sys, 'getfilesystemencoding').return_value = 'utf-8'
  d = {}
  self.assertEqual(d, {})
  value = 'Ṳᾔḯ¢◎ⅾℯ'
  encoding.SetEncodedValue(d, 'foo', value)
  stored = d['foo']
  # Python 3 stores the raw value unencoded; Python 2 stores encoded bytes.
  self.assertEqual(six.PY3, isinstance(stored, six.text_type))
  round_tripped = encoding.GetEncodedValue(d, 'foo')
  self.assertTrue(isinstance(round_tripped, six.text_type))
  self.assertEqual(value, round_tripped)
def SetUp(self):
  """Force the fcntl/termios code path and stub out every other probe."""
  self.imports = ImportMocker()
  # ctypes is unavailable; fcntl/termios are replaced with mocks.
  self.imports.SetImport('ctypes', None)
  self.imports.SetImport('fcntl', self.MockFcntlModule)
  self.imports.SetImport('termios', self.MockTermiosModule)
  self.StartEnvPatch({})
  # No COLUMNS env var, so the terminal must be probed directly.
  encoding.SetEncodedValue(os.environ, 'COLUMNS', None)
  # The stty/tput subprocess probe must fail.
  self.check_output = self.StartObjectPatch(subprocess, 'check_output')
  self.check_output.side_effect = OSError
  # os.ctermid does not exist on all platforms (e.g. Windows); create a
  # placeholder attribute so StartObjectPatch has something to patch.
  self.has_os_ctermid = hasattr(os, 'ctermid')
  if not self.has_os_ctermid:
    setattr(os, 'ctermid', None)
  self.ctermid = self.StartObjectPatch(os, 'ctermid')
  self.ctermid.return_value = os.devnull
def _AppendMetricsEnvironment(tag):
  """Appends tag to the Cloud SDK metrics environment tag.

  The metrics/environment tag is sent via the useragent. This tag is visible
  in metrics for all gcloud commands executed by the calling command.

  Args:
    tag: The string to append to the metrics/environment tag.
  """
  current = properties.VALUES.metrics.environment.Get() or ''
  # Dot-separate the new tag from any existing value.
  combined = '{}.{}'.format(current, tag) if current else tag
  encoding.SetEncodedValue(os.environ, 'CLOUDSDK_METRICS_ENVIRONMENT',
                           combined)
def _CreateAndDeleteCacheAcrossImplementations(self,
                                               create_implementation=None,
                                               delete_implementation=None):
  """Create a cache under one implementation and delete it under another."""
  if create_implementation:
    encoding.SetEncodedValue(os.environ, 'CLOUDSDK_CACHE_IMPLEMENTATION',
                             create_implementation)
  res_cache = resource_cache.ResourceCache()
  collection = updaters.NoCollectionUpdater(res_cache)
  param_info = collection.ParameterInfo()
  collection.Select(('*',), param_info)
  res_cache.Close()
  if delete_implementation:
    encoding.SetEncodedValue(os.environ, 'CLOUDSDK_CACHE_IMPLEMENTATION',
                             delete_implementation)
  # The first Delete removes the cache; a second attempt must fail.
  resource_cache.Delete()
  with self.assertRaisesRegex(exceptions.CacheNotFound,
                              r'resource.cache] not found'):
    resource_cache.Delete()
def SetUp(self):
  """Builds an isolated config/home/properties sandbox for each test."""
  self._prev_log_level = log.getLogger().getEffectiveLevel()
  self.__root_dir = file_utils.TemporaryDirectory()
  self.root_path = self.__root_dir.path
  self.temp_path = self.CreateTempDir()
  self.global_config_path = self.CreateTempDir('config')
  # Point the Cloud SDK config directory at the sandbox.
  encoding.SetEncodedValue(os.environ, config.CLOUDSDK_CONFIG,
                           self.global_config_path, encoding='utf-8')
  # MONKEYPATCH: We'd like to StartObjectPatch but Popen is patched
  # elsewhere.
  subprocess.Popen = self.__Popen
  # Redirect home to a temp directory.
  self.home_path = self.CreateTempDir()
  self.StartEnvPatch({'HOME': self.home_path})
  self.mock_get_home_path = self.StartPatch(
      'googlecloudsdk.core.util.files.GetHomeDir',
      return_value=self.home_path)
  self.mock_expandvars = self.StartPatch(
      'os.path.expandvars', autospec=True, return_value=self.home_path)
  self.addCleanup(self._CloseDirs)
  self.addCleanup(resources.REGISTRY.Clear)
  # Make sure there is nothing in the environment before the tests starts.
  self.__CleanProperties(setup=True)
  self.addCleanup(self.__CleanProperties)
  # Turn these off for tests.
  properties.VALUES.GetInvocationStack()[:] = [{}]
  # This is not a real property but behaves like one.
  os.environ.pop('CLOUDSDK_ACTIVE_CONFIG_NAME', None)
  # pylint:disable=protected-access
  named_configs.FLAG_OVERRIDE_STACK._stack[:] = []
  # Make sure certain things are restored between tests
  self.install_props = self._GetInstallPropsStats()
  self.addCleanup(self._VerifyInstallProps)
  properties.VALUES.core.interactive_ux_style.Set(
      properties.VALUES.core.InteractiveUXStyles.TESTING.name)
  # The mocking of stdout and stderr happen in the test_case module whenever
  # a test is using output capture. We need to reset the logger here to pick
  # up those settings.
  log.Reset()
def testEffectiveActiveConfigName(self):
  """Flag override beats the env var, which beats the activator file."""
  self.ClearAllConfigurations()
  self.assertEqual(None, named_configs._EffectiveActiveConfigName())
  # From the file
  with open(self.named_config_activator, 'w') as f:
    f.write('foo')
  self.assertEqual('foo', named_configs._EffectiveActiveConfigName())
  # Env override
  encoding.SetEncodedValue(os.environ, 'CLOUDSDK_ACTIVE_CONFIG_NAME', 'bar')
  self.assertEqual('bar', named_configs._EffectiveActiveConfigName())
  # A pushed flag value takes precedence over everything else.
  named_configs.FLAG_OVERRIDE_STACK.Push('baz')
  self.assertEqual('baz', named_configs._EffectiveActiveConfigName())
def _AddPythonPathsToEnv(env): """Returns a copy of env with Python specific path vars added. This preserves any environment specific test runner tweaks. Args: env: {str: str}, Optional environment variables for the script. Returns: A copy of env with Python specific path vars added. """ # sys.path was initialized from PYTHONPATH at startup so we don't have to # check PYTHONPATH here. The result will be the original PYTHONPATH dirs # plus and dirs inserted/appened by Python startup and test runner # initialization. encoding.SetEncodedValue(env, 'PYTHONPATH', os.pathsep.join(sys.path)) return env
def testTableTransformColor(self):
  """status.color() should wrap matching values in ANSI color sequences."""
  self.StartEnvPatch({})
  # A color-capable terminal type is required for ANSI escape output.
  encoding.SetEncodedValue(os.environ, 'TERM', 'xterm')
  console_attr.GetConsoleAttr('utf8', reset=True)
  self.Print(projection='(status.color(red=FAIL|ERROR,yellow=WARNING,'
             'green=PASS|OK):label=STATUS)')
  # Values are padded to the column width, then wrapped in the color escape
  # for their matched pattern; UNKNOWN matches nothing and stays plain.
  self.AssertOutputEquals(
      textwrap.dedent("""\
          STATUS
          \x1b[32mPASS   \x1b[39;0m
          \x1b[32mOK     \x1b[39;0m
          \x1b[31;1mFAIL   \x1b[39;0m
          \x1b[31;1mERROR  \x1b[39;0m
          \x1b[33;1mWARNING\x1b[39;0m
          UNKNOWN
          """))
def testSetEncodedValueAscii(self):
  """ASCII values round-trip; missing keys honor the supplied default."""
  d = {}
  self.assertEqual(d, {})
  value = 'ascii'
  encoding.SetEncodedValue(d, 'foo', value)
  stored = d['foo']
  # Python 3 stores the raw value unencoded; Python 2 stores encoded bytes.
  self.assertEqual(six.PY3, isinstance(stored, six.text_type))
  fetched = encoding.GetEncodedValue(d, 'foo')
  self.assertTrue(isinstance(fetched, six.text_type))
  self.assertEqual(value, fetched)
  # Missing keys return the default: None when omitted, else the given one.
  self.assertEqual(None, encoding.GetEncodedValue(d, 'bar'))
  self.assertEqual('', encoding.GetEncodedValue(d, 'bar', ''))
def testSetEncodedValueDict(self):
  """Exercise set/overwrite/delete semantics on a plain dict."""
  d = {}
  self.assertEqual(d, {})
  # Setting stores the value; setting again overwrites it.
  encoding.SetEncodedValue(d, 'foo', '1')
  self.assertEqual(d, {'foo': '1'})
  encoding.SetEncodedValue(d, 'foo', '0')
  self.assertEqual(d, {'foo': '0'})
  # A None value removes the key; removing a missing key is a no-op.
  encoding.SetEncodedValue(d, 'foo', None)
  self.assertEqual(d, {})
  encoding.SetEncodedValue(d, 'foo', None)
  self.assertEqual(d, {})
  # Multiple keys are independent.
  encoding.SetEncodedValue(d, 'foo', '1')
  encoding.SetEncodedValue(d, 'bar', '2')
  self.assertEqual(d, {'foo': '1', 'bar': '2'})
  encoding.SetEncodedValue(d, 'bar', None)
  self.assertEqual(d, {'foo': '1'})
def ReportMetrics(self, wait_for_report=False):
  """Reports the collected metrics using a separate async process.

  Args:
    wait_for_report: bool, True to block until the reporting subprocess
      finishes before returning.
  """
  if not self._metrics:
    return
  # Hand the queued metrics to the reporter via a pickle file; delete=False
  # because the child process must be able to open it after we close it.
  temp_metrics_file = tempfile.NamedTemporaryFile(delete=False)
  with temp_metrics_file:
    pickle.dump(self._metrics, temp_metrics_file)
  self._metrics = []
  this_file = encoding.Decode(__file__)
  reporting_script_path = os.path.realpath(
      os.path.join(os.path.dirname(this_file), 'metrics_reporter.py'))
  execution_args = execution_utils.ArgsForPythonTool(
      reporting_script_path, temp_metrics_file.name)
  # On Python 2.x on Windows, the first arg can't be unicode. We encode it
  # anyway because there is really nothing else we can do if that happens.
  # https://bugs.python.org/issue19264
  execution_args = [encoding.Encode(a) for a in execution_args]
  exec_env = os.environ.copy()
  encoding.SetEncodedValue(exec_env, 'PYTHONPATH', os.pathsep.join(sys.path))
  try:
    p = subprocess.Popen(execution_args, env=exec_env,
                         **self._async_popen_args)
    log.debug('Metrics reporting process started...')
  except OSError:
    # This can happen specifically if the Python executable moves between the
    # start of this process and now.
    log.debug('Metrics reporting process failed to start.')
    # BUG FIX: bail out here; the old code fell through to p.communicate()
    # with `p` unbound when wait_for_report was True, raising
    # UnboundLocalError.
    return
  if wait_for_report:
    # NOTE: p.wait() can cause a deadlock. p.communicate() is recommended.
    # See python docs for more information.
    p.communicate()
    log.debug('Metrics reporting process finished.')
def RunScenario(self, scenario):
  """Runs `gcloud meta test <scenario>` in a subprocess, forwarding stderr.

  Args:
    scenario: str, the meta test scenario name to run.
  """
  # First get the path of the python gcloud main module.
  # This could be different in unit and bundled tests.
  sdk_path = googlecloudsdk.__file__
  for _ in range(2):
    sdk_path = os.path.dirname(sdk_path)
    gcloud = os.path.join(sdk_path, 'gcloud.py')
    if os.path.exists(gcloud):
      break
  # Make sure the gcloud main imports are visible.
  env = os.environ.copy()
  encoding.SetEncodedValue(env, 'PYTHONPATH', os.pathsep.join(sys.path))
  # Subprocess stderr=X redirection requires file streams, not buffers.
  # stderr=subprocess.PIPE only works reliably with p=subprocess.Popen() and
  # p.communicate(), but that messes up meta test signal delivery by
  # absorbing it -- we would never see it here.
  stderr_path = os.path.join(self.temp_path, 'stderr')
  try:
    # BUG FIX: use `with` so the stream is always closed and the cleanup
    # below never references an unbound name if open() itself fails.
    with open(stderr_path, 'w') as stderr:
      # Here we do not disable site packages, since these tests can run under
      # virtual env where not all packages are vendored.
      subprocess.check_call(
          [sys.executable, gcloud, 'meta', 'test', scenario],
          stderr=stderr, env=env)
  finally:
    if os.path.exists(stderr_path):
      # Write the subprocess stderr to the buffer stream used by the
      # WithOutputCapture mixin.
      with open(stderr_path, 'r') as stderr:
        sys.stderr.write(stderr.read())
      os.remove(stderr_path)
def RunPredict(model_dir,
               json_instances=None,
               text_instances=None,
               framework='tensorflow',
               signature_name=None):
  """Run ML Engine local prediction.

  Args:
    model_dir: str, directory containing the exported model.
    json_instances: instances supplied as JSON, if any.
    text_instances: instances supplied as text, if any.
    framework: str, the ML framework of the model ('tensorflow' default).
    signature_name: str, optional signature to use for prediction.

  Raises:
    LocalPredictEnvironmentError: if no installed SDK or Python is found.
    LocalPredictRuntimeError: if the prediction subprocess fails.
    InvalidReturnValueError: if the subprocess output is not JSON.

  Returns:
    The parsed JSON prediction results.
  """
  instances = predict_utilities.ReadInstancesFromArgs(json_instances,
                                                      text_instances)
  sdk_root = config.Paths().sdk_root
  if not sdk_root:
    raise LocalPredictEnvironmentError(
        'You must be running an installed Cloud SDK to perform local '
        'prediction.')
  # Inheriting the environment preserves important variables in the child
  # process. In particular, LD_LIBRARY_PATH under linux and PATH under
  # windows could be used to point to non-standard install locations of CUDA
  # and CUDNN. If not inherited, the child process could fail to initialize
  # Tensorflow.
  env = os.environ.copy()
  encoding.SetEncodedValue(env, 'CLOUDSDK_ROOT', sdk_root)
  # We want to use whatever the user's Python was, before the Cloud SDK
  # started changing the PATH. That's where Tensorflow is installed.
  python_executables = files.SearchForExecutableOnPath('python')
  # Need to ensure that ml_sdk is in PYTHONPATH for the import in
  # local_predict to succeed.
  orig_py_path = encoding.GetEncodedValue(env, 'PYTHONPATH') or ''
  if orig_py_path:
    # BUG FIX: use os.pathsep (';' on Windows, ':' on POSIX) instead of a
    # hard-coded ':' so the resulting PYTHONPATH is valid on Windows too.
    orig_py_path = os.pathsep + orig_py_path
  encoding.SetEncodedValue(
      env, 'PYTHONPATH',
      os.path.join(sdk_root, 'lib', 'third_party', 'ml_sdk') + orig_py_path)
  if not python_executables:
    # This doesn't have to be actionable because things are probably beyond
    # help at this point.
    raise LocalPredictEnvironmentError(
        'Something has gone really wrong; we can\'t find a valid Python '
        'executable on your PATH.')
  # Use python found on PATH or local_python override if set
  python_executable = (properties.VALUES.ml_engine.local_python.Get() or
                       python_executables[0])
  predict_args = ['--model-dir', model_dir, '--framework', framework]
  if signature_name:
    predict_args += ['--signature-name', signature_name]
  # Start local prediction in a subprocess.
  args = [
      encoding.Encode(a)
      for a in ([python_executable, local_predict.__file__] + predict_args)
  ]
  proc = subprocess.Popen(args,
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          env=env)
  # Pass the instances to the process that actually runs local prediction.
  for instance in instances:
    proc.stdin.write((json.dumps(instance) + '\n').encode('utf-8'))
  proc.stdin.flush()
  # Get the results for the local prediction.
  output, err = proc.communicate()
  if proc.returncode != 0:
    raise LocalPredictRuntimeError(err)
  if err:
    log.warning(err)
  try:
    return json.loads(output)
  except ValueError:
    raise InvalidReturnValueError('The output for prediction is not '
                                  'in JSON format: ' + output)
def main():
  """Launches gsutil."""
  args = []
  project, account = bootstrapping.GetActiveProjectAndAccount()
  # Credentials are passed only when enabled and not globally disabled.
  pass_credentials = (
      properties.VALUES.core.pass_credentials_to_gsutil.GetBool() and
      not properties.VALUES.auth.disable_credentials.GetBool())
  _MaybeAddBotoOption(args, 'GSUtil', 'default_project_id', project)
  if pass_credentials:
    # Allow gsutil to only check for the '1' string value, as is done
    # with regard to the 'CLOUDSDK_WRAPPER' environment variable.
    encoding.SetEncodedValue(
        os.environ, 'CLOUDSDK_CORE_PASS_CREDENTIALS_TO_GSUTIL', '1')
    if account in c_gce.Metadata().Accounts():
      # Tell gsutil that it should obtain credentials from the GCE metadata
      # server for the instance's configured service account.
      _MaybeAddBotoOption(args, 'GoogleCompute', 'service_account', 'default')
      # For auth'n debugging purposes, allow gsutil to reason about whether
      # the configured service account was set in a boto file or passed from
      # here.
      encoding.SetEncodedValue(
          os.environ, 'CLOUDSDK_PASSED_GCE_SERVICE_ACCOUNT_TO_GSUTIL', '1')
    else:
      legacy_config_path = config.Paths().LegacyCredentialsGSUtilPath(account)
      # We construct a BOTO_PATH that tacks the config containing our
      # credentials options onto the end of the list of config paths. We
      # ensure the other credential options are loaded first so that ours
      # will take precedence and overwrite them.
      boto_config = encoding.GetEncodedValue(os.environ, 'BOTO_CONFIG', '')
      boto_path = encoding.GetEncodedValue(os.environ, 'BOTO_PATH', '')
      if boto_config:
        boto_path = os.pathsep.join([boto_config, legacy_config_path])
      elif boto_path:
        boto_path = os.pathsep.join([boto_path, legacy_config_path])
      else:
        path_parts = ['/etc/boto.cfg',
                      os.path.expanduser(os.path.join('~', '.boto')),
                      legacy_config_path]
        boto_path = os.pathsep.join(path_parts)
      # BOTO_CONFIG must be unset for BOTO_PATH to take effect in boto.
      encoding.SetEncodedValue(os.environ, 'BOTO_CONFIG', None)
      encoding.SetEncodedValue(os.environ, 'BOTO_PATH', boto_path)
  # Tell gsutil whether gcloud analytics collection is enabled.
  encoding.SetEncodedValue(
      os.environ, 'GA_CID', metrics.GetCIDIfMetricsEnabled())
  # Set proxy settings. Note that if these proxy settings are configured in a
  # boto config file, the options here will be loaded afterward, overriding
  # them.
  proxy_params = properties.VALUES.proxy
  proxy_address = proxy_params.address.Get()
  if proxy_address:
    _MaybeAddBotoOption(args, 'Boto', 'proxy', proxy_address)
    _MaybeAddBotoOption(args, 'Boto', 'proxy_port', proxy_params.port.Get())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_rdns',
                        proxy_params.rdns.GetBool())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_user',
                        proxy_params.username.Get())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_pass',
                        proxy_params.password.Get())
  # Set SSL-related settings.
  disable_ssl = properties.VALUES.auth.disable_ssl_validation.GetBool()
  _MaybeAddBotoOption(args, 'Boto', 'https_validate_certificates',
                      None if disable_ssl is None else not disable_ssl)
  _MaybeAddBotoOption(args, 'Boto', 'ca_certificates_file',
                      properties.VALUES.core.custom_ca_certs_file.Get())
  # Note that the original args to gsutil will be appended after the args
  # we've supplied here.
  bootstrapping.ExecutePythonTool('platform/gsutil', 'gsutil', *args)
def main():
  """Launches gsutil."""
  project, account = bootstrapping.GetActiveProjectAndAccount()
  # Credentials are passed only when enabled and not globally disabled.
  pass_credentials = (
      properties.VALUES.core.pass_credentials_to_gsutil.GetBool() and
      not properties.VALUES.auth.disable_credentials.GetBool())
  if pass_credentials and account not in c_gce.Metadata().Accounts():
    gsutil_path = config.Paths().LegacyCredentialsGSUtilPath(account)
    # Allow gsutil to only check for the '1' string value, as is done
    # with regard to the 'CLOUDSDK_WRAPPER' environment variable.
    encoding.SetEncodedValue(os.environ,
                             'CLOUDSDK_CORE_PASS_CREDENTIALS_TO_GSUTIL', '1')
    boto_config = encoding.GetEncodedValue(os.environ, 'BOTO_CONFIG', '')
    boto_path = encoding.GetEncodedValue(os.environ, 'BOTO_PATH', '')
    # We construct a BOTO_PATH that tacks the refresh token config
    # on the end.
    if boto_config:
      boto_path = os.pathsep.join([boto_config, gsutil_path])
    elif boto_path:
      boto_path = os.pathsep.join([boto_path, gsutil_path])
    else:
      path_parts = [
          '/etc/boto.cfg',
          os.path.expanduser(os.path.join('~', '.boto')),
          gsutil_path
      ]
      boto_path = os.pathsep.join(path_parts)
    # BOTO_CONFIG must be unset for BOTO_PATH to take effect in boto.
    encoding.SetEncodedValue(os.environ, 'BOTO_CONFIG', None)
    encoding.SetEncodedValue(os.environ, 'BOTO_PATH', boto_path)
  # Tell gsutil whether gcloud analytics collection is enabled.
  encoding.SetEncodedValue(os.environ, 'GA_CID',
                           metrics.GetCIDIfMetricsEnabled())
  args = []
  _MaybeAddBotoOption(args, 'GSUtil', 'default_project_id', project)
  if pass_credentials and account in c_gce.Metadata().Accounts():
    # Tell gsutil to look for GCE service accounts.
    _MaybeAddBotoOption(args, 'GoogleCompute', 'service_account', 'default')
  proxy_params = properties.VALUES.proxy
  proxy_address = proxy_params.address.Get()
  if proxy_address:
    _MaybeAddBotoOption(args, 'Boto', 'proxy', proxy_address)
    _MaybeAddBotoOption(args, 'Boto', 'proxy_port', proxy_params.port.Get())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_rdns',
                        proxy_params.rdns.GetBool())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_user',
                        proxy_params.username.Get())
    _MaybeAddBotoOption(args, 'Boto', 'proxy_pass',
                        proxy_params.password.Get())
  disable_ssl = properties.VALUES.auth.disable_ssl_validation.GetBool()
  _MaybeAddBotoOption(args, 'Boto', 'https_validate_certificates',
                      None if disable_ssl is None else not disable_ssl)
  _MaybeAddBotoOption(args, 'Boto', 'ca_certificates_file',
                      properties.VALUES.core.custom_ca_certs_file.Get())
  bootstrapping.ExecutePythonTool('platform/gsutil', 'gsutil', *args)
def testCurrentModeErrors(self, value):
  """An invalid update-mode env var value must make Mode.FromEnv raise."""
  with self.assertRaises(updates.Error):
    encoding.SetEncodedValue(os.environ, updates.UPDATE_MODES_ENV_VAR, value)
    updates.Mode.FromEnv()