def _RunCmd(self, cmd, params=None, disable_user_output=True):
    """Executes a gcloud sub-command and returns its serializable result.

    Args:
      cmd: list of str, the command path to execute (e.g. ['config', 'list']).
      params: list of str or None, additional arguments for the command.
      disable_user_output: bool, if True the sub-command's own console output
        is suppressed so only the returned result is surfaced.

    Returns:
      The serializable command result, or None if cmd is not a valid command.

    Raises:
      c_exc.FailedSubCommand: if the sub-command exits via SystemExit.
    """
    if not self._cli_power_users_only.IsValidCommand(cmd):
        log.info('Command %s does not exist.', cmd)
        return None
    full_cmd = cmd + (params if params is not None else [])
    log.info('Executing: [gcloud %s]', ' '.join(full_cmd))
    args = list(full_cmd)
    try:
        # Suppress output from the individual command so we only get its run
        # result and don't clutter the output of init.
        if disable_user_output:
            args.append('--no-user-output-enabled')
            # Unless the user explicitly set verbosity, silence subcommands.
            if properties.VALUES.core.verbosity.Get() is None:
                args.append('--verbosity=none')
        if properties.VALUES.core.log_http.GetBool():
            args.append('--log-http')
        # TODO(b/38338044): Remove usage of ExecuteCommandDoNotUse
        return resource_projector.MakeSerializable(
            self.ExecuteCommandDoNotUse(args))
    except SystemExit as exc:
        log.info('[%s] has failed\n', ' '.join(full_cmd))
        raise c_exc.FailedSubCommand(full_cmd, exc.code)
    except BaseException:
        log.info('Failed to run [%s]\n', ' '.join(full_cmd))
        raise
def _RunCmd(self, cmd, params=None, disable_user_output=True):
    """Executes a gcloud sub-command through self.cli and returns its result.

    Args:
      cmd: list of str, the command path to execute.
      params: list of str or None, additional arguments for the command.
      disable_user_output: bool, if True the sub-command's own console output
        is suppressed so only the returned result is surfaced.

    Returns:
      The command result (generators are eagerly materialized into lists),
      or None if cmd is not a valid command.

    Raises:
      c_exc.FailedSubCommand: if the sub-command exits via SystemExit.
    """
    if not self.cli.IsValidCommand(cmd):
        log.info('Command %s does not exist.', cmd)
        return None
    invocation = cmd + (params if params is not None else [])
    log.info('Executing: [gcloud %s]', ' '.join(invocation))
    args = list(invocation)
    try:
        # Suppress output from the individual command so we only get its run
        # result and don't clutter the output of init.
        if disable_user_output:
            args.append('--no-user-output-enabled')
            # Unless the user explicitly set verbosity, silence subcommands.
            if properties.VALUES.core.verbosity.Get() is None:
                args.append('--verbosity=none')
        result = self.cli.Execute(args)
        # Best effort to force the result of Execute eagerly. Check only for
        # generators (not general iterables) to avoid category errors such as
        # accidentally converting a string or dict to a list.
        if isinstance(result, types.GeneratorType):
            result = list(result)
        return result
    except SystemExit as exc:
        log.info('[%s] has failed\n', ' '.join(invocation))
        raise c_exc.FailedSubCommand(invocation, exc.code)
    except BaseException:
        log.info('Failed to run [%s]\n', ' '.join(invocation))
        raise
def _UploadToGcsGsutil(local_path, dest_path):
    """Uploads a local file to GCS using gsutil.

    Args:
      local_path: str, path of the local file to upload.
      dest_path: str, the gs:// destination URI.

    Returns:
      str, the gs:// URI the file was uploaded to (dest_path).

    Raises:
      exceptions.FailedSubCommand: if the gsutil invocation exits non-zero.
    """
    exit_code = storage_util.RunGsutilCommand('cp', [local_path, dest_path])
    if exit_code:
        # Point the user at the log file for the underlying gsutil failure.
        log.err.Print('Failed to upload file. See {} for details.'.format(
            log.GetLogFilePath()))
        raise exceptions.FailedSubCommand(
            ['gsutil', 'cp', local_path, dest_path], exit_code)
    return dest_path
def _UploadToGcs(is_async, local_path, daisy_bucket, image_uuid):
    """Uploads a local file to GCS. Returns the gs:// URI to that file.

    Args:
      is_async: bool, if True print a notice that the import will continue
        asynchronously once the upload completes.
      local_path: str, path of the local file to upload.
      daisy_bucket: str, name of the scratch GCS bucket.
      image_uuid: str, unique id used to namespace the uploaded object.

    Returns:
      str, the gs:// URI the file was uploaded to.

    Raises:
      exceptions.FailedSubCommand: if the gsutil invocation exits non-zero.
    """
    # Spaces are not valid in the destination object name; replace them.
    file_name = os.path.basename(local_path).replace(' ', '-')
    dest_path = 'gs://{0}/tmpimage/{1}-{2}'.format(
        daisy_bucket, image_uuid, file_name)
    if is_async:
        log.status.Print('Async: Once upload is complete, your image will be '
                         'imported from Cloud Storage asynchronously.')
    tracker_message = 'Copying [{0}] to [{1}]'.format(local_path, dest_path)
    with progress_tracker.ProgressTracker(tracker_message):
        exit_code = storage_util.RunGsutilCommand(
            'cp', [local_path, dest_path])
        if exit_code:
            log.err.Print('Failed to upload file. See {} for details.'.format(
                log.GetLogFilePath()))
            raise exceptions.FailedSubCommand(
                ['gsutil', 'cp', local_path, dest_path], exit_code)
    return dest_path
def _PreliminarylyVerifyInstance(self, args, instance, project, user,
                                 ip_address):
    """Runs a preliminary ssh check against the instance's private IP.

    Args:
      args: the parsed command-line arguments.
      instance: the target instance resource.
      project: the project the instance belongs to.
      user: str, the ssh user name.
      ip_address: str, the private IP address to verify connectivity to.

    Raises:
      core_exceptions.NetworkIssueError: if the connection cannot be
        established (exit code 255) or the instance's identity cannot be
        confirmed (exit code 1).
      exceptions.FailedSubCommand: for any other non-zero ssh exit code.
    """
    ssh_args = ssh_utils.GetSshArgsForPreliminaryVerification(
        args, user, instance, ip_address, self.env, self.keys)
    ssh_return_code = self.ActuallyRun(
        args, ssh_args, user, instance, project, ip_address,
        strict_error_checking=False,
        use_account_service=self._use_accounts_service)
    if ssh_return_code == 0:
        return
    if ssh_return_code == 255:
        raise core_exceptions.NetworkIssueError(
            'Unable to connect to private IP {0}.'.format(ip_address))
    if ssh_return_code == 1:
        raise core_exceptions.NetworkIssueError(
            'Established connection with private IP {0} but was unable to '
            'confirm ID of the instance.'.format(ip_address))
    # Fix: FailedSubCommand expects the command as a sequence of parts (it
    # joins them itself); the original passed ' '.join(ssh_args), a string,
    # which would be treated as a sequence of characters. Pass the list,
    # consistent with every other FailedSubCommand call site in this file.
    raise exceptions.FailedSubCommand(ssh_args, ssh_return_code)
def _UploadToGcs(is_async, local_path, daisy_bucket, image_uuid):
    """Uploads a local file to GCS. Returns the gs:// URI to that file.

    Fix: the parameter was originally named 'async', which is a reserved
    keyword in Python 3.7+ and therefore a syntax error; renamed to
    'is_async', matching the other _UploadToGcs variant in this file.

    Args:
      is_async: bool, if True print a notice that the import will continue
        asynchronously once the upload completes.
      local_path: str, path of the local file to upload.
      daisy_bucket: str, name of the scratch GCS bucket.
      image_uuid: str, unique id used to namespace the uploaded object.

    Returns:
      str, the gs:// URI the file was uploaded to.

    Raises:
      exceptions.FailedSubCommand: if the gsutil invocation exits non-zero.
    """
    # Spaces are not valid in the destination object name; replace them.
    file_name = os.path.basename(local_path).replace(' ', '-')
    dest_path = 'gs://{0}/tmpimage/{1}-{2}'.format(
        daisy_bucket, image_uuid, file_name)
    log.status.Print('\nCopying [{0}] to [{1}]'.format(local_path, dest_path))
    if is_async:
        log.status.Print(
            'Once completed, your image will be imported from Cloud'
            ' Storage asynchronously.')
    retcode = storage_util.RunGsutilCommand('cp', [local_path, dest_path])
    if retcode != 0:
        log.err.Print('Failed to upload file. See {} for details.'.format(
            log.GetLogFilePath()))
        raise exceptions.FailedSubCommand(
            ['gsutil', 'cp', local_path, dest_path], retcode)
    return dest_path


def _CopyToScratchBucket(source_uri, image_uuid, storage_client, daisy_bucket):
    """Copy image from source_uri to daisy scratch bucket.

    Args:
      source_uri: str, gs:// URI of the source image object.
      image_uuid: str, unique id used to namespace the copied object.
      storage_client: client exposing Rewrite() for server-side object copy.
      daisy_bucket: str, name of the scratch GCS bucket.

    Returns:
      str, the gs:// URI of the copied object in the scratch bucket.
    """
    image_file = os.path.basename(source_uri)
    dest_uri = 'gs://{0}/tmpimage/{1}-{2}'.format(
        daisy_bucket, image_uuid, image_file)
    src_object = resources.REGISTRY.Parse(
        source_uri, collection='storage.objects')
    dest_object = resources.REGISTRY.Parse(
        dest_uri, collection='storage.objects')
    log.status.Print('\nCopying [{0}] to [{1}]'.format(source_uri, dest_uri))
    # Server-side rewrite avoids downloading and re-uploading the image.
    storage_client.Rewrite(src_object, dest_object)
    return dest_uri