Example #1
def _FindExecutable(exe):
    """Finds the path to an executable.

  Args:
    exe: Name of the executable.

  Returns:
    Path to the executable.
  Raises:
    EnvironmentError: The executable can't be found.
  """
    path = file_utils.FindExecutableOnPath(exe) or _FindOrInstallComponent(exe)
    if not path:
        raise EnvironmentError('Unable to locate %s.' % exe)
    return path
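The example above prefers whatever is already on PATH and only then tries to install the component. A minimal standard-library sketch of the same fallback chain, with shutil.which standing in for file_utils.FindExecutableOnPath and a hypothetical install_component() hook:

import shutil

def install_component(exe):
    # Hypothetical installer hook; a real implementation would fetch the
    # missing tool and return its path, or None on failure.
    return None

def find_executable(exe):
    # Prefer an executable already on PATH, then fall back to installing it.
    path = shutil.which(exe) or install_component(exe)
    if not path:
        raise EnvironmentError('Unable to locate %s.' % exe)
    return path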
Example #2
def _GetGsutilPath():
    """Determines the path to the gsutil binary."""
    sdk_bin_path = config.Paths().sdk_bin_path
    if not sdk_bin_path:
        # Check if gsutil is located on the PATH.
        gsutil_path = file_utils.FindExecutableOnPath('gsutil')
        if gsutil_path:
            log.debug(
                'Using gsutil found at [{path}]'.format(path=gsutil_path))
            return gsutil_path
        else:
            raise exceptions.ToolException(
                ('A SDK root could not be found. Please '
                 'check your installation.'))
    return os.path.join(sdk_bin_path, 'gsutil')
Example #3
def _GetGsutilPath():
    """Determines the path to the gsutil binary."""
    sdk_bin_path = config.Paths().sdk_bin_path
    if not sdk_bin_path:
        # Check if gsutil is located on the PATH.
        gsutil_path = file_utils.FindExecutableOnPath('gsutil')
        if gsutil_path:
            log.debug(
                'Using gsutil found at [{path}]'.format(path=gsutil_path))
            return gsutil_path
        else:
            raise GsutilError(
                'A path to the storage client `gsutil` could not be '
                'found. Please check your SDK installation.')
    return os.path.join(sdk_bin_path, 'gsutil')
Example #4
def _GetCloudSqlProxyPath():
    """Determines the path to the cloud_sql_proxy binary."""
    sdk_bin_path = config.Paths().sdk_bin_path
    if not sdk_bin_path:
        # Check if cloud_sql_proxy is located on the PATH.
        proxy_path = file_utils.FindExecutableOnPath('cloud_sql_proxy')
        if proxy_path:
            log.debug('Using cloud_sql_proxy found at [{path}]'.format(
                path=proxy_path))
            return proxy_path
        else:
            raise exceptions.ToolException(
                'A Cloud SQL Proxy SDK root could not be found. Please check your '
                'installation.')
    return os.path.join(sdk_bin_path, 'cloud_sql_proxy')
Example #5
def GetGcloudPreferredExecutable(exe):
    """Finds the path to an executable, preferring the gcloud packaged version.

  Args:
    exe: Name of the executable.

  Returns:
    Path to the executable.
  Raises:
    EnvironmentError: The executable can't be found.
  """
    path = _FindOrInstallComponent(exe) or file_utils.FindExecutableOnPath(exe)
    if not path:
        raise EnvironmentError('Unable to locate %s.' % exe)
    return path
Example #6
    def Current(cls):
        """Retrieve the current environment.

    Returns:
      Environment, the active and current environment on this machine.
    """
        if platforms.OperatingSystem.IsWindows():
            commands = Environment._WINDOWS_COMMANDS
            path = _SdkHelperBin()
        else:
            commands = Environment._NIX_COMMANDS
            path = None
        env = Environment()
        for key, cmd in commands.iteritems():
            setattr(env, key, files.FindExecutableOnPath(cmd, path=path))
        return env
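Environment.Current() simply resolves a mapping of logical command names to absolute paths found on PATH. A small sketch of that idea using only the standard library (the command mapping below is a placeholder, not the SDK's _NIX_COMMANDS):

import shutil

NIX_COMMANDS = {'ssh': 'ssh', 'scp': 'scp', 'keygen': 'ssh-keygen'}

def resolve_commands(commands, path=None):
    # Map each logical name to the resolved executable path (or None).
    return {key: shutil.which(cmd, path=path) for key, cmd in commands.items()}

env = resolve_commands(NIX_COMMANDS)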
Example #7
def _GetGcloudScript():
    """Get name of the gcloud script."""

    if (platforms.OperatingSystem.Current() ==
            platforms.OperatingSystem.WINDOWS):
        gcloud_ext = '.cmd'
    else:
        gcloud_ext = ''

    gcloud_name = 'gcloud'
    gcloud = files.FindExecutableOnPath(gcloud_name, pathext=[gcloud_ext])

    if not gcloud:
        raise GcloudIsNotInPath(
            'Could not verify that gcloud is in the PATH. '
            'Please make sure the Cloud SDK bin folder is in PATH.')
    return gcloud_name + gcloud_ext
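On Windows the lookup has to account for the '.cmd' wrapper, which is what the pathext argument handles above. A hedged stdlib equivalent (shutil.which already consults PATHEXT on Windows, so this only mimics the explicit per-extension search):

import os
import shutil

def find_with_extensions(name, extensions=('',)):
    # Try each candidate name+extension on PATH and return the first hit.
    for ext in extensions:
        path = shutil.which(name + ext)
        if path:
            return path
    return None

gcloud_ext = '.cmd' if os.name == 'nt' else ''
gcloud = find_with_extensions('gcloud', [gcloud_ext])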
Example #8
def More(contents, out=None, prompt=None, check_pager=True):
    """Run a user specified pager or fall back to the internal pager.

  Args:
    contents: The entire contents of the text lines to page.
    out: The output stream, log.out (effectively) if None.
    prompt: The page break prompt.
    check_pager: Checks the PAGER env var and uses it if True.
  """
    if not IsInteractive(output=True):
        if not out:
            out = log.out
        out.write(contents)
        return
    if not out:
        # Rendered help to the log file.
        log.file_only_logger.info(contents)
        # Paging shenanigans to stdout.
        out = sys.stdout
    if check_pager:
        pager = encoding.GetEncodedValue(os.environ, 'PAGER', None)
        if pager == '-':
            # Use the fallback Pager.
            pager = None
        elif not pager:
            # Search for a pager that handles ANSI escapes.
            for command in ('less', 'pager'):
                if files.FindExecutableOnPath(command):
                    pager = command
                    break
        if pager:
            # If the pager is less(1) then instruct it to display raw ANSI escape
            # sequences to enable colors and font embellishments.
            less_orig = encoding.GetEncodedValue(os.environ, 'LESS', None)
            less = '-R' + (less_orig or '')
            encoding.SetEncodedValue(os.environ, 'LESS', less)
            p = subprocess.Popen(pager, stdin=subprocess.PIPE, shell=True)
            enc = console_attr.GetConsoleAttr().GetEncoding()
            p.communicate(input=contents.encode(enc))
            p.wait()
            if less_orig is None:
                encoding.SetEncodedValue(os.environ, 'LESS', None)
            return
    # Fall back to the internal pager.
    console_pager.Pager(contents, out, prompt).Run()
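The pager selection above boils down to: honor $PAGER (with '-' meaning the built-in pager), otherwise search PATH for less or pager, and set LESS=-R so ANSI colors pass through. A rough, self-contained sketch of that chain, not the SDK implementation:

import os
import shutil
import subprocess
import sys

def page(text):
    pager = os.environ.get('PAGER')
    if pager == '-' or not sys.stdout.isatty():
        sys.stdout.write(text)
        return
    if not pager:
        # Search for a pager that handles ANSI escapes.
        pager = next((c for c in ('less', 'pager') if shutil.which(c)), None)
    if not pager:
        sys.stdout.write(text)
        return
    env = dict(os.environ, LESS='-R' + os.environ.get('LESS', ''))
    subprocess.run(pager, input=text.encode(), shell=True, env=env)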
Example #9
def InstallBinaryNoOverrides(binary_name, prompt):
  """Helper method for installing binary dependencies within command execs."""
  console_io.PromptContinue(
      message='Pausing command execution:',
      prompt_string=prompt,
      cancel_on_no=True,
      cancel_string='Aborting component install for {} and command execution.'
      .format(binary_name))
  platform = platforms.Platform.Current()
  update_manager_client = update_manager.UpdateManager(platform_filter=platform)
  update_manager_client.Install([binary_name])

  path_executable = files.FindExecutableOnPath(binary_name)
  if path_executable:
    return path_executable

  raise MissingExecutableException(
      binary_name, '{} binary not installed'.format(binary_name))
Example #10
def _EnsureDockerRunning():
    """Make sure docker is running."""
    docker = file_utils.FindExecutableOnPath('docker')
    if not docker:
        raise RuntimeMissingDependencyError(
            'Cannot locate docker on $PATH. Install docker from '
            'https://docs.docker.com/get-docker/.')
    try:
        # docker info returns 0 if it can connect to the docker daemon and
        # returns a non-zero error code if it cannot. run_subprocess
        # raises an error if the process does not return 0.
        run_subprocess.Run([docker, 'info'],
                           timeout_sec=20,
                           show_output=_IsDebug())
    except subprocess.CalledProcessError:
        raise RuntimeMissingDependencyError(
            'Unable to reach docker daemon. Make sure docker is running '
            'and reachable.')
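The docker check is two steps: make sure the binary is on PATH, then probe the daemon with `docker info` under a timeout. A standalone sketch of the same probe using subprocess.run:

import shutil
import subprocess

def ensure_docker_running():
    docker = shutil.which('docker')
    if not docker:
        raise RuntimeError('Cannot locate docker on $PATH.')
    try:
        # `docker info` exits non-zero when the daemon is unreachable.
        subprocess.run([docker, 'info'], check=True, timeout=20,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        raise RuntimeError('Unable to reach the docker daemon. Make sure '
                           'docker is running and reachable.')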
Example #11
  def __init__(self, suite, bin_path=None):
    """Create a new environment by supplying a suite and command directory.

    Args:
      suite: Suite, the suite for this environment.
      bin_path: str, the path where the commands are located. If None, use
          standard $PATH.
    """
    self.suite = suite
    self.bin_path = bin_path
    # So pytype is aware of attributes.
    self.ssh = None
    self.ssh_term = None
    self.scp = None
    self.keygen = None
    for key, cmd in six.iteritems(self.COMMANDS[suite]):
      setattr(self, key, files.FindExecutableOnPath(cmd, path=self.bin_path))
    self.ssh_exit_code = self.SSH_EXIT_CODES[suite]
Example #12
def _GetPrimaryNodeName():
    """Get the primary node name.

  Returns:
    str, the name of the primary node. If running in tensorflow 1.x,
    return 'master'. If running in tensorflow 2.x, return 'chief'.
    If tensorflow is not installed in the local environment, it will return
    the default name 'master'.
  Raises:
    RuntimeError: if there is no python executable on the user system.
  """
    exe_override = properties.VALUES.ml_engine.local_python.Get()
    python_executable = exe_override or files.FindExecutableOnPath('python')
    if not python_executable:
        raise RuntimeError('No python interpreter found on local machine')
    cmd = [
        python_executable, '-c',
        'import tensorflow as tf; print(tf.version.VERSION)'
    ]
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    return_code = proc.wait()
    if return_code != 0:
        log.warning('''
    Cannot import tensorflow under path {}. Using "chief" for cluster setting.
    If this is not intended, please check if tensorflow is installed. Please also
    verify if the python path used is correct. If not, to change the python path:
    use `gcloud config set ml_engine/local_python $python_path`
    Eg: gcloud config set ml_engine/local_python /usr/bin/python3'''.format(
            python_executable))
        return 'chief'

    tf_version = proc.stdout.read()
    if 'decode' in dir(tf_version):
        tf_version = tf_version.decode('utf-8')
    if tf_version.startswith('1.'):
        return 'master'
    elif tf_version.startswith('2.'):
        return 'chief'
    log.warning(
        'Unexpected tensorflow version {}, using the default primary'
        ' node name, aka "chief" for cluster settings'.format(tf_version))
    return 'chief'
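The probe above runs a child interpreter just to read tf.version.VERSION and then maps 1.x to 'master' and 2.x to 'chief'. A compact sketch of that logic (subprocess.run is used here instead of Popen purely for brevity):

import shutil
import subprocess

def primary_node_name(python_executable=None):
    python_executable = python_executable or shutil.which('python')
    if not python_executable:
        raise RuntimeError('No python interpreter found on local machine')
    proc = subprocess.run(
        [python_executable, '-c',
         'import tensorflow as tf; print(tf.version.VERSION)'],
        capture_output=True, text=True)
    if proc.returncode != 0:
        # tensorflow not importable; fall back to the TF 2.x name.
        return 'chief'
    return 'master' if proc.stdout.startswith('1.') else 'chief'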
Example #13
def More(contents, out=None, prompt=None, check_pager=True):
  """Run a user specified pager or fall back to the internal pager.

  Args:
    contents: The entire contents of the text lines to page.
    out: The output stream, log.out (effectively) if None.
    prompt: The page break prompt.
    check_pager: Checks the PAGER env var and uses it if True.
  """
  if not IsInteractive(output=True):
    if not out:
      out = log.out
    out.write(contents)
    return
  if not out:
    # Rendered help to the log file.
    log.file_only_logger.info(contents)
    # Paging shenanigans to stdout.
    out = sys.stdout
  if check_pager:
    pager = os.environ.get('PAGER', None)
    if pager == '-':
      # Use the fallback Pager.
      pager = None
    elif not pager:
      # Search for a pager that handles ANSI escapes.
      for command in ('less', 'pager'):
        if files.FindExecutableOnPath(command):
          pager = command
          break
    if pager:
      less = os.environ.get('LESS', None)
      if less is None:
        os.environ['LESS'] = '-R'
      p = subprocess.Popen(pager, stdin=subprocess.PIPE, shell=True)
      encoding = console_attr.GetConsoleAttr().GetEncoding()
      p.communicate(input=contents.encode(encoding))
      p.wait()
      if less is None:
        os.environ.pop('LESS')
      return
  # Fall back to the internal pager.
  console_pager.Pager(contents, out, prompt).Run()
Example #14
def RequireJavaInstalled(for_text, min_version=7):
  """Require that a certain version of Java is installed.

  Args:
    for_text: str, the text explaining what Java is necessary for.
    min_version: int, the minimum major version to check for.

  Raises:
    JavaError: if a Java executable is not found or has the wrong version.

  Returns:
    str, Path to the Java executable.
  """
  java_path = files.FindExecutableOnPath('java')
  if not java_path:
    raise JavaError('To use the {for_text}, a Java {v}+ JRE must be installed '
                    'and on your system PATH'.format(for_text=for_text,
                                                     v=min_version))
  try:
    output = subprocess.check_output([java_path, '-version'],
                                     stderr=subprocess.STDOUT)
  except subprocess.CalledProcessError:
    raise JavaError('Unable to execute the java that was found on your PATH.'
                    ' The {for_text} requires a Java {v}+ JRE installed and on '
                    'your system PATH'.format(for_text=for_text, v=min_version))

  java_exec_version_error = JavaError(
      'The java executable on your PATH is not a Java {v}+ JRE.'
      ' The {for_text} requires a Java {v}+ JRE installed and on '
      'your system PATH'.format(v=min_version, for_text=for_text))

  match = re.search(r'version "(\d+)\.(\d+)\.', output)
  if not match:
    raise java_exec_version_error

  major_version = int(match.group(1))
  minor_version = int(match.group(2))
  # Java <= 8 used 1.X version format. Java > 8 uses X.* version format.
  if ((major_version == 1 and minor_version < min_version) or
      (major_version > 1 and major_version < min_version)):
    raise java_exec_version_error

  return java_path
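The awkward part of the version check is that `java -version` reports "1.8.0_xx" for Java 8 and earlier but "11.0.x"-style strings afterwards. A small sketch of just that parsing step:

import re

def java_major_version(version_output):
    # Returns the effective major version, or None if the output is unexpected.
    match = re.search(r'version "(\d+)\.(\d+)\.', version_output)
    if not match:
        return None
    major, minor = int(match.group(1)), int(match.group(2))
    # Java <= 8 reports 1.<major>; Java > 8 reports <major>.<minor>.
    return minor if major == 1 else major

assert java_major_version('java version "1.8.0_181"') == 8
assert java_major_version('openjdk version "11.0.2" 2019-01-15') == 11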
Example #15
def _JavaStagingMapper(command_path, descriptor, staging_dir):
    """Map a java staging request to the right args.

  Args:
    command_path: str, path to the jar tool file.
    descriptor: str, path to the `appengine-web.xml`
    staging_dir: str, path to the empty staging dir

  Raises:
    java.JavaError, if Java is not installed.

  Returns:
    [str], args for executable invocation.
  """
    java.CheckIfJavaIsInstalled('local staging for java')
    java_bin = files.FindExecutableOnPath('java')
    app_dir = os.path.dirname(os.path.dirname(descriptor))
    args = ([java_bin, '-classpath', command_path, _JAVA_APPCFG_ENTRY_POINT] +
            _JAVA_APPCFG_STAGE_FLAGS + ['stage', app_dir, staging_dir])
    return args
Example #16
def StartEmulatorProxy(args=None):
    """Starts the emulator reverse proxy, as a context manager.

  Note that this will SIGKILL the process once the context manager exits,
  so the caller can and should try to shut down the process
  more gracefully.

  Args:
    args: [str], the arguments to be passed to the relevant script

  Yields:
    The proxy process.
  """
    reverse_proxy_jar = ReverseProxyJar()
    java_path = files.FindExecutableOnPath('java')
    classname = 'com.google.cloudsdk.emulators.EmulatorProxy'
    proc = subprocess.Popen([java_path, '-cp', reverse_proxy_jar, classname] +
                            args,
                            stdout=subprocess.PIPE)
    yield proc
    proc.kill()
Example #17
File: git.py Project: saranraju90/multik8s
def _GetGcloudScript(full_path=False):
    """Get name of the gcloud script.

  Args:
    full_path: boolean, True if the gcloud full path should be used if free
      of spaces.

  Returns:
    str, command to use to execute gcloud

  Raises:
    GcloudIsNotInPath: if gcloud is not found in the path
  """

    if (platforms.OperatingSystem.Current() ==
            platforms.OperatingSystem.WINDOWS):
        gcloud_ext = '.cmd'
    else:
        gcloud_ext = ''

    gcloud_name = 'gcloud'
    gcloud = files.FindExecutableOnPath(gcloud_name, pathext=[gcloud_ext])

    if not gcloud:
        raise GcloudIsNotInPath(
            'Could not verify that gcloud is in the PATH. '
            'Please make sure the Cloud SDK bin folder is in PATH.')
    if full_path:
        if not re.match(r'[-a-zA-Z0-9_/]+$', gcloud):
            log.warning(
                textwrap.dedent("""\
          You specified the option to use the full gcloud path in the git
          credential.helper, but the path contains non alphanumberic characters
          so the credential helper may not work correctly."""))
        return gcloud
    else:
        return gcloud_name + gcloud_ext
Example #18
def RunKubectlCommand(args, out_func=None, err_func=None):
  """Shells out a command to kubectl.

  This command should be called within the context of a TemporaryKubeconfig
  context manager in order for kubectl to be configured to access the correct
  cluster.

  Args:
    args: list of strings, command line arguments to pass to the kubectl
        command. Should omit the kubectl command itself. For example, to
        execute 'kubectl get pods', provide ['get', 'pods'].
    out_func: str->None, a function to call with the stdout of the kubectl
        command
    err_func: str->None, a function to call with the stderr of the kubectl
        command

  Raises:
    Error: if kubectl could not be called
    KubectlError: if the invocation of kubectl was unsuccessful
  """
  kubectl_path = files.FindExecutableOnPath(_KUBECTL_COMPONENT_NAME,
                                            config.Paths().sdk_bin_path)
  if kubectl_path is None:
    raise Error(MISSING_KUBECTL_MSG)

  try:
    retval = execution_utils.Exec(
        execution_utils.ArgsForExecutableTool(kubectl_path, *args),
        no_exit=True,
        out_func=out_func,
        err_func=err_func,
        universal_newlines=True)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise KubectlError(six.text_type(e))
  if retval:
    raise KubectlError('kubectl returned non-zero status code.')
Example #19
def CheckForInstalledBinary(binary_name,
                            check_hidden=False,
                            custom_message=None,
                            install_if_missing=False):
  """Check if binary is installed and return path or raise error.

  Prefer the installed component over any version found on path.

  Args:
    binary_name: str, name of binary to search for.
    check_hidden: bool, whether to check hidden components for the binary.
    custom_message: str, custom message to be used by
      MissingExecutableException if thrown.
    install_if_missing: bool, if true will prompt user to install binary if
      not found.

  Returns:
    Path to executable if found on path or installed component.

  Raises:
    MissingExecutableException: if executable can not be found or can not be
     installed as a component.
  """
  is_component = CheckBinaryComponentInstalled(binary_name, check_hidden)

  if is_component:
    return os.path.join(config.Paths().sdk_bin_path, binary_name)

  path_executable = files.FindExecutableOnPath(binary_name)
  if path_executable:
    return path_executable

  if install_if_missing:
    return InstallBinaryNoOverrides(
        binary_name, _INSTALL_MISSING_EXEC_PROMPT.format(binary=binary_name))

  raise MissingExecutableException(binary_name, custom_message)
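CheckForInstalledBinary prefers the SDK-installed component and only then falls back to PATH. A generic sketch of that ordering, with bundled_bin_dir as a hypothetical stand-in for config.Paths().sdk_bin_path:

import os
import shutil

def find_binary(name, bundled_bin_dir=None):
    # Prefer a copy shipped in the bundled bin directory, then fall back to PATH.
    if bundled_bin_dir:
        candidate = os.path.join(bundled_bin_dir, name)
        if os.path.exists(candidate):
            return candidate
    path = shutil.which(name)
    if path:
        return path
    raise FileNotFoundError('{} binary not installed'.format(name))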
Example #20
def RunEmulatorProxyClient(log_file=None, env=None):
  """Runs proxy client to test running emulator reverse proxy.

  Args:
    log_file: int, a file to reroute stdout and stderr to.
    env: dict, the env for the subprocess.

  Yields:
    the calling subprocess
  """
  reverse_proxy_jar = proxy_util.ReverseProxyJar()
  java_path = files.FindExecutableOnPath('java')
  classname = 'com.google.cloudsdk.emulators.Testing$ProtoClient'
  stdout = log_file if log_file is not None else subprocess.PIPE
  stderr = log_file if log_file is not None else subprocess.PIPE
  proc = subprocess.Popen(
      [java_path, '-cp', reverse_proxy_jar, classname],
      stdout=stdout, stderr=stderr, env=env)
  yield proc
  try:
    proc.kill()
  except OSError:
    # The caller did our dirty work for us
    pass
Example #21
 def _GetManPageCollectorType(cls):
   """Returns the man page collector type."""
   if files.FindExecutableOnPath('man'):
     return _ManCommandCollector
   return _ManUrlCollector
Example #22
  def Run(self, args):
    """Connects to a Cloud SQL instance.

    Args:
      args: argparse.Namespace, The arguments that this command was invoked
          with.

    Returns:
      If no exception is raised this method does not return. A new process is
      started and the original one is killed.
    Raises:
      HttpException: An http error response was received while executing api
          request.
      ToolException: An error other than http error occurred while executing the
          command.
    """
    # TODO(b/62055495): Replace ToolExceptions with specific exceptions.
    client = api_util.SqlClient(api_util.API_VERSION_DEFAULT)
    sql_client = client.sql_client
    sql_messages = client.sql_messages

    validate.ValidateInstanceName(args.instance)
    instance_ref = client.resource_parser.Parse(
        args.instance,
        params={'project': properties.VALUES.core.project.GetOrFail},
        collection='sql.instances')

    acl_name = _WhitelistClientIP(instance_ref, sql_client, sql_messages,
                                  client.resource_parser)

    # Get the client IP that the server sees. Sadly we can only do this by
    # checking the name of the authorized network rule.
    retryer = retry.Retryer(max_retrials=2, exponential_sleep_multiplier=2)
    try:
      instance_info, client_ip = retryer.RetryOnResult(
          _GetClientIP,
          [instance_ref, sql_client, acl_name],
          should_retry_if=lambda x, s: x[1] is None,  # client_ip is None
          sleep_ms=500)
    except retry.RetryException:
      raise exceptions.ToolException('Could not whitelist client IP. Server '
                                     'did not reply with the whitelisted IP.')

    # Check for the mysql or psql executable based on the db version.
    db_type = instance_info.databaseVersion.split('_')[0]
    exe_name = constants.DB_EXE.get(db_type, 'mysql')
    exe = files.FindExecutableOnPath(exe_name)
    if not exe:
      raise exceptions.ToolException(
          '{0} client not found.  Please install a {1} client and make sure '
          'it is in PATH to be able to connect to the database instance.'
          .format(exe_name.title(), exe_name))

    # Check the version of IP and decide if we need to add ipv4 support.
    ip_type = network.GetIpVersion(client_ip)
    if ip_type == network.IP_VERSION_4:
      if instance_info.settings.ipConfiguration.ipv4Enabled:
        ip_address = instance_info.ipAddresses[0].ipAddress
      else:
        # TODO(b/36049930): ask user if we should enable ipv4 addressing
        message = ('It seems your client does not have ipv6 connectivity and '
                   'the database instance does not have an ipv4 address. '
                   'Please request an ipv4 address for this database instance.')
        raise exceptions.ToolException(message)
    elif ip_type == network.IP_VERSION_6:
      ip_address = instance_info.ipv6Address
    else:
      raise exceptions.ToolException('Could not connect to SQL server.')

    # Determine what SQL user to connect with.
    sql_user = constants.DEFAULT_SQL_USER[exe_name]
    if args.user:
      sql_user = args.user

    # We have everything we need, time to party!
    flags = constants.EXE_FLAGS[exe_name]
    sql_args = [exe_name, flags['hostname'], ip_address]
    sql_args.extend([flags['user'], sql_user])
    sql_args.append(flags['password'])

    try:
      log.status.write(
          'Connecting to database with SQL user [{0}].'.format(sql_user))
      execution_utils.Exec(sql_args)
    except OSError:
      log.error('Failed to execute command "{0}"'.format(' '.join(sql_args)))
      log.Print(info_holder.InfoHolder())
Example #23
class UploadDownloadTest(e2e_base.WithServiceAuth):
    """Test uploading a small number of small files to Cloud Storage."""

    _GSUTIL_EXECUTABLE = files.FindExecutableOnPath('gsutil')

    def SetUp(self):
        self.storage_client = storage_api.StorageClient()
        self.files_to_upload = []
        self.object_path = next(
            e2e_utils.GetResourceNameGenerator(prefix='object'))
        self.bucket_name = next(
            e2e_utils.GetResourceNameGenerator(prefix=BUCKET_PREFIX))

    def _AssertFileUploaded(self, bucket_ref, expected_file):
        object_ = storage_util.ObjectReference.FromBucketRef(
            bucket_ref, expected_file)
        try:
            self.storage_client.GetObject(object_)
        except apitools_exceptions.HttpError as err:
            self.fail('Object [{}] not successfully uploaded:\n\n{}'.format(
                object_.ToUrl(), str(err)))

    def _TestUploadAndDownload(self, contents):
        with storage_e2e_util.CloudStorageBucket(self.storage_client,
                                                 self.bucket_name,
                                                 self.Project()) as bucket:
            file_path = self.Touch(self.temp_path,
                                   'test_file',
                                   contents=contents.encode('utf-8'))
            target_obj_ref = storage_util.ObjectReference.FromBucketRef(
                bucket, self.object_path)
            with storage_e2e_util.GcsFile(self.storage_client, file_path,
                                          target_obj_ref):
                self._AssertFileUploaded(bucket, self.object_path)

                download_path = os.path.join(self.temp_path, 'download_file')
                source_obj_ref = storage_util.ObjectReference.FromBucketRef(
                    bucket, self.object_path)
                self.storage_client.CopyFileFromGCS(source_obj_ref,
                                                    download_path)

                # Now download again using ReadObject, the in-memory version of
                # CopyFileFromGCS
                object_ref = storage_util.ObjectReference.FromBucketRef(
                    bucket, self.object_path)
                stream = self.storage_client.ReadObject(object_ref)

        # Check regular file download
        self.AssertFileExists(download_path)
        actual_contents = files.ReadFileContents(download_path)
        self.assertEqual(contents, actual_contents)

        # Check stream download
        self.assertEqual(stream.getvalue().decode('utf-8'), contents)

    def testCopyFileToAndFromGcs(self):
        self._TestUploadAndDownload('test file content.')

    def testCopyFileToAndFromGcs_NonAscii(self):
        self._TestUploadAndDownload('\u0394')

    def testCopyFileToAndFromGcs_LargeFile(self):
        """Tests file uploads that require chunking."""
        # Default chunk size isn't available as a constant
        # Need 10 multiples until issue with copied stream chunks appears.
        file_length = properties.VALUES.storage.chunk_size.GetInt() * 10
        # There's a check for large files accidentally left in the temporary
        # directory; we're okay with it in this case, since this deliberately tests
        # a large file.
        self._dirs_size_limit_method = file_length * 2
        file_path = self.Touch(self.temp_path,
                               'test_file',
                               contents=('.' * file_length))
        with storage_e2e_util.CloudStorageBucket(self.storage_client,
                                                 self.bucket_name,
                                                 self.Project()) as bucket:
            target_obj_ref = storage_util.ObjectReference.FromBucketRef(
                bucket, self.object_path)
            with storage_e2e_util.GcsFile(self.storage_client, file_path,
                                          target_obj_ref):
                self._AssertFileUploaded(bucket, self.object_path)
        # Don't run the download portion of the test as a time-saving measure

    @test_case.Filters.RunOnlyIf(_GSUTIL_EXECUTABLE, 'No gsutil found')
    def testRunGsutilCommand(self):
        self.assertEqual(0, storage_util.RunGsutilCommand('help'))

    @test_case.Filters.RunOnlyIf(_GSUTIL_EXECUTABLE, 'No gsutil found')
    def testGsutilCopy(self):
        file_length = 1024
        file_path = self.Touch(self.temp_path,
                               'test_file',
                               contents=('.' * file_length))
        with storage_e2e_util.CloudStorageBucket(self.storage_client,
                                                 self.bucket_name,
                                                 self.Project()) as bucket:
            # Test upload
            object_uri = 'gs://{bucket}/{object_path}'.format(
                bucket=self.bucket_name, object_path=self.object_path)
            exit_code = storage_util.RunGsutilCommand('cp',
                                                      [file_path, object_uri])
            self.assertEqual(0, exit_code)
            self._AssertFileUploaded(bucket, self.object_path)

            # Test download
            download_file_path = os.path.join(self.temp_path, 'download_file')
            download_exit_code = storage_util.RunGsutilCommand(
                'cp', [object_uri, download_file_path])
            self.assertEqual(0, download_exit_code)
            self.assertTrue(os.path.exists(download_file_path))
            self.assertTrue(filecmp.cmp(file_path, download_file_path))

            # Try to clean up
            obj_ref = storage_util.ObjectReference.FromUrl(object_uri)
            self.storage_client.DeleteObject(obj_ref)
Example #24
        def __call__(self, parser, namespace, values, option_string=None):
            alias_args = ['gcloud']

            # subprocess.call() below handles 'shell not found' diagnostics
            if values:
                shell = values
            else:
                shell = os.environ.get('SHELL')
                if not shell:
                    # search for a default for SHELL, biased from left to right
                    for shell in ['bash', 'ksh', 'sh', 'zsh', 'dash']:
                        path = file_utils.FindExecutableOnPath(shell)
                        if path:
                            shell = path
                            break
                    else:
                        shell = 'sh'

            for arg in cli.argv:
                if arg == '--shell' or arg.startswith('--shell='):
                    # Only things up to, and not including, the first --shell.
                    # TODO(user): This search can have false positives. eg,
                    # $ gcloud --project --shell auth --shell
                    # If someone somehow had a project "--shell", or if some other
                    # flag value was legitimately "--shell". For now, we'll let this be
                    # a problematic, but rare, corner case.
                    break

                # TODO(user): Make this quoting more robust.
                if ' ' in arg:
                    arg = '"{arg}"'.format(arg=arg)

                alias_args.append(arg)

            alias_prefix = ' '.join(alias_args)
            prompt = ' '.join(['gcloud'] + alias_args[1:])
            interactive = sys.stdin.isatty()
            buf = StringIO.StringIO()
            GenerateRcFile(alias_prefix, prompt, subcommands, interactive, buf)

            exit_code = 0
            with file_utils.TemporaryDirectory() as tmpdir:
                # link or symlink not available on all targets so we make N copies.
                envfile = '.gcloudenv'
                for rcfile in ['.bashrc', '.zshrc', envfile]:
                    path = os.path.join(tmpdir, rcfile)
                    with open(path, 'w') as f:
                        f.write(buf.getvalue())
                        f.flush()
                try:
                    restore = ''
                    for name in ['HOME', 'ZDOTDIR', 'ENV']:
                        val = os.environ.get(name)
                        if val is not None:
                            restore += ' ' + name + "='" + val + "'"
                    if restore != '':
                        restore = 'export' + restore
                    env = dict(os.environ)
                    env['_GCLOUD_RESTORE_'] = restore
                    env['HOME'] = tmpdir
                    env['ZDOTDIR'] = tmpdir
                    if os.sep == '\\':
                        # Workaround for UWIN ksh(1) PATH lookup on pure windows paths.
                        # The embedded '/' means "this is literal, don't do PATH lookup".
                        # Also handles the eval of $ENV that eliminates \'s.
                        env['ENV'] = '/'.join([tmpdir,
                                               envfile]).replace('\\', '\\\\')
                    else:
                        env['ENV'] = path
                    # Why print terminal escape sequences if stdout is not a terminal?
                    if not sys.stdout.isatty():
                        env['TERM'] = 'dumb'
                    cmd = [shell, '-i']
                    # not interactive implies batch mode with commands on stdin. Since zsh
                    # insists on reading from /dev/tty we stuff it in a new session which
                    # detaches /dev/tty and forces it to read from stdin.  bash and dash
                    # complain about no tty in -i mode, so zsh is special-cased.
                    if not interactive and os.path.basename(shell).startswith(
                            'zsh'):
                        # eventually change preexec_fn=os.setsid to start_new_session=True
                        exit_code = subprocess.call(cmd,
                                                    env=env,
                                                    preexec_fn=os.setsid)
                    else:
                        exit_code = subprocess.call(cmd, env=env)
                except OSError as e:
                    log.error("""\
could not run the shell [{shell}] -- \
make sure it is installed and on the system PATH [{e}]\
""".format(e=e, shell=shell))
                    exit_code = 1

            sys.exit(exit_code)
Example #25
     b'CfesnGxEEzpu66fb4M4S36sCAwEAAQ==\n'
     b'-----END PUBLIC KEY-----\n',
     b'y78flNI4F4I44vxeFq9BaqBXZGjmkELfatGSlsGwOBP9Xmhwh8jkCijdCgYcMEdp'
     b'BWhHFwd50lJyO6wWQPCh6/p3i4nA/mgQ9/3S6EMw+HPKO/IEpv7uOv7j0vT27jDo'
     b'yopEDHU5a7IJctWq8Xyc4euyd/xTG6v8X++UdJL1keS5Ftw3/FZfWt4iaDKzl/EP'
     b'PNvYlh6YdVevxjmmyuHn5gT+y8Qqj++zSe8wMls5t2/sTCcVi8p5ZP0HQvpmQVyA'
     b'wlhNN6WmleE5r2LJpjtfCfHwLOzgGEiS0zGSSAGOSdFwVA7rqCt1xvtYdyrphNXq'
     b'i1vVERDvWox7sCypQeyNkqMvYozqwOw1r0FgMVvgj7WTyT6rhfNmRR88o2+k/Fvc'
     b'TVZZJ0aJe0Hp3KwShq7kklCwWHlNwdt2jRtCO8aHQiPGfEzpBBzJYAgmLq8xX7Ez'
     b'J+ht1MVjMTIC//dkfUSymYgrVcUc7hBOuRpvoxo2Ze5zDsYXUf2Zj5vcBbbM/5bZ'
     b'mHwoE42y0NhAsaxilFybBNdDUpiFs0MMg28HcOUCXoUj5Ax3P4WmcUh4PDGqqenr'
     b'GqCoIp1vkMmrDmQOIxHPhMJT9edx8sYgzbk2fbB5Fty/byMlr6hlPmm7P8/qfctE'
     b'/9b4qoiy+dxDe7rYnP4kCfesnGxEEzpu66fb4M4S36s=')
]

_OPEN_SSL_EXECUTABLE = files.FindExecutableOnPath('openssl')


@test_case.Filters.RunOnlyIf(_OPEN_SSL_EXECUTABLE, 'No openssl found')
@test_case.Filters.DoNotRunOnWindows(
    'Windows uses Windows Crypto APIs instead of OpenSSL')
class OpensslEncryptionUtilsTest(test_case.TestCase):

  def SetUp(self):
    self.crypt = openssl_encryption_utils.OpensslCrypt(_OPEN_SSL_EXECUTABLE)

  def testStripKey(self):
    expected_output = (
        b'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvjrnuNYG8QtewB/mcbS9'
        b'uqyN3gCG+RulSv4FZAFVib+nbdFpQemcFHfKVk3tve+J24U3TW87B49mat3tvPPl'
        b's5y0N6esYEDyvaynP5ELdQWGSf9V9Kr6b2yBreyq03WpzlMRdLZ7Mj8W6SzK9BpJ'
Example #26
    def Run(self, args):
        start = time_utils.CurrentTimeSec()

        # Set up Encryption utilities.
        openssl_executable = files.FindExecutableOnPath('openssl')
        if windows_encryption_utils:
            crypt = windows_encryption_utils.WinCrypt()
        elif openssl_executable:
            crypt = openssl_encryption_utils.OpensslCrypt(openssl_executable)
        else:
            raise utils.MissingDependencyError(
                'Your platform does not support OpenSSL.')

        # Get Authenticated email address and default username.
        email = gaia_utils.GetAuthenticatedGaiaEmail(self.http)
        if args.user:
            user = args.user
        else:
            user = gaia_utils.MapGaiaEmailToDefaultAccountName(email)

        if args.name == user:
            raise utils.InvalidUserError(
                MACHINE_USERNAME_SAME_ERROR.format(user, args.name))

        # Warn user (This warning doesn't show for non-interactive sessions).
        message = RESET_PASSWORD_WARNING.format(user)
        prompt_string = (
            'Would you like to set or reset the password for [{0}]'.format(
                user))
        console_io.PromptContinue(message=message,
                                  prompt_string=prompt_string,
                                  cancel_on_no=True)

        log.status.Print(
            'Resetting and retrieving password for [{0}] on [{1}]'.format(
                user, args.name))

        # Get Encryption Keys.
        key = crypt.GetKeyPair()
        modulus, exponent = crypt.GetModulusExponentFromPublicKey(
            crypt.GetPublicKey(key))

        # Create Windows key entry.
        self.windows_key_entry = self._ConstructWindowsKeyEntry(
            user, modulus, exponent, email)

        # Call ReadWriteCommand.Run() which will fetch the instance and update
        # the metadata (using the data in self.windows_key_entry).
        objects = super(ResetWindowsPassword, self).Run(args)
        updated_instance = list(objects)[0]

        # Retrieve and Decrypt the password from the serial console.
        enc_password = self._GetEncryptedPasswordFromSerialPort(modulus)
        password = crypt.DecryptMessage(key, enc_password)

        # Get External IP address.
        try:
            access_configs = updated_instance['networkInterfaces'][0][
                'accessConfigs']
            external_ip_address = access_configs[0]['natIP']
        except KeyError:
            log.warn(NO_IP_WARNING.format(updated_instance['name']))
            external_ip_address = None

        # Check for old Windows credentials.
        if self.old_metadata_keys:
            log.warn(
                OLD_KEYS_WARNING.format(self.ref.Name(), self.ref.Name(),
                                        self.ref.zone,
                                        ','.join(self.old_metadata_keys)))

        log.info('Total Elapsed Time: {0}'.format(time_utils.CurrentTimeSec() -
                                                  start))

        # The connection info resource.
        connection_info = {
            'username': user,
            'password': password,
            'ip_address': external_ip_address
        }
        return connection_info
Example #27
 def __init__(self, cli, args):
     super(UnknownReferenceMapper, self).__init__(cli, args)
     self.known = files.FindExecutableOnPath(args[0])
Example #28
File: util.py Project: bopopescu/CS231n
def CheckKubectlInstalled():
    """Verify that the kubectl component is installed or print a warning."""
    if (not file_utils.FindExecutableOnPath(_KUBECTL_COMPONENT_NAME)
            and not _KubectlInstalledAsComponent()):
        log.warn(MISSING_KUBECTL_MSG)
Example #29
    def Run(self, args):
        message = (
            'A personal authentication session will propagate your personal '
            'credentials to the cluster, so make sure you trust the cluster '
            'and the user who created it.')
        console_io.PromptContinue(
            message=message,
            cancel_on_no=True,
            cancel_string='Enabling session aborted by user')
        dataproc = dp.Dataproc(self.ReleaseTrack())

        cluster_ref = args.CONCEPTS.cluster.Parse()
        project = cluster_ref.projectId
        region = cluster_ref.region
        cluster_name = cluster_ref.clusterName
        get_request = dataproc.messages.DataprocProjectsRegionsClustersGetRequest(
            projectId=project, region=region, clusterName=cluster_name)
        cluster = dataproc.client.projects_regions_clusters.Get(get_request)
        cluster_uuid = cluster.clusterUuid

        if args.access_boundary:
            with files.FileReader(args.access_boundary, mode='r') as abf:
                access_boundary_json = abf.read()
        else:
            access_boundary_json = flags.ProjectGcsObjectsAccessBoundary(
                project)

        openssl_executable = args.openssl_command
        if not openssl_executable:
            try:
                openssl_executable = files.FindExecutableOnPath('openssl')
            except ValueError:
                log.fatal(
                    'Could not find openssl on your system. The enable-session '
                    'command requires openssl to be installed.')

        operation_poller = waiter.CloudOperationPollerNoResources(
            dataproc.client.projects_regions_operations,
            lambda operation: operation.name)
        try:
            cluster_key = clusters.ClusterKey(cluster)
            if not cluster_key:
                raise exceptions.PersonalAuthError(
                    'The cluster {} does not support personal auth.'.format(
                        cluster_name))

            with progress_tracker.ProgressTracker(
                    'Injecting initial credentials into the cluster {}'.format(
                        cluster_name),
                    autotick=True):
                self.inject_credentials(dataproc, project, region,
                                        cluster_name, cluster_uuid,
                                        cluster_key, access_boundary_json,
                                        openssl_executable, operation_poller)

            if not args.refresh_credentials:
                return

            update_message = (
                'Periodically refreshing credentials for cluster {}. This'
                ' will continue running until the command is interrupted'
            ).format(cluster_name)

            with progress_tracker.ProgressTracker(update_message,
                                                  autotick=True):
                try:
                    # Cluster keys are periodically regenerated, so fetch the latest
                    # each time we inject credentials.
                    cluster = dataproc.client.projects_regions_clusters.Get(
                        get_request)
                    cluster_key = clusters.ClusterKey(cluster)
                    if not cluster_key:
                        raise exceptions.PersonalAuthError(
                            'The cluster {} does not support personal auth.'.
                            format(cluster_name))

                    failure_count = 0
                    while failure_count < 3:
                        try:
                            time.sleep(30)
                            self.inject_credentials(dataproc, project, region,
                                                    cluster_name, cluster_uuid,
                                                    cluster_key,
                                                    access_boundary_json,
                                                    openssl_executable,
                                                    operation_poller)
                            failure_count = 0
                        except ValueError as err:
                            log.error(err)
                            failure_count += 1
                    raise exceptions.PersonalAuthError(
                        'Credential injection failed three times in a row, giving up...'
                    )
                except (console_io.OperationCancelledError, KeyboardInterrupt):
                    return
        except exceptions.PersonalAuthError as err:
            log.error(err)
            return
Example #30
def MakeProcess(module_name,
                package_root,
                args=None,
                cluster=None,
                task_type=None,
                index=None,
                **extra_popen_args):
    """Make a Popen object that runs the module, with the correct env.

  If task_type is primary, this instead replaces the current process with the
  subprocess via execution_utils.Exec
  Args:
    module_name: str. Name of the module to run, e.g. trainer.task
    package_root: str. Absolute path to the package root for the module.
      used as CWD for the subprocess.
    args: [str]. Additional user args. Any relative paths will not work.
    cluster: dict. Cluster configuration dictionary. Suitable for passing to
      tf.train.ClusterSpec.
    task_type: str. Task type of this process. Only relevant if cluster is
      specified.
    index: int. Task index of this process.
    **extra_popen_args: extra args passed to Popen. Used for testing.
  Returns:
    a subprocess.Popen object corresponding to the subprocesses or an int
    corresponding to the return value of the subprocess
    (if task_type is primary)
  Raises:
    RuntimeError: if there is no python executable on the user system
  """
    if args is None:
        args = []
    exe_override = properties.VALUES.ml_engine.local_python.Get()
    python_executable = exe_override or files.FindExecutableOnPath('python')
    if not python_executable:
        raise RuntimeError('No python interpreter found on local machine')
    cmd = [python_executable, '-m', module_name] + args
    config = {
        'job': {
            'job_name': module_name,
            'args': args
        },
        'task': {
            'type': task_type,
            'index': index
        } if cluster else {},
        'cluster': cluster or {},
        'environment': 'cloud'
    }
    log.info(('launching training process:\n'
              'command: {cmd}\n config: {config}').format(cmd=' '.join(cmd),
                                                          config=json.dumps(
                                                              config,
                                                              indent=2,
                                                              sort_keys=True)))

    env = os.environ.copy()
    # the tf_config environment variable is used to pass the tensorflow
    # configuration options to the training module. the module specific
    # arguments are passed as command line arguments.
    env['TF_CONFIG'] = json.dumps(config)
    if task_type == _GetPrimaryNodeName():
        return execution_utils.Exec(cmd,
                                    env=env,
                                    no_exit=True,
                                    cwd=package_root,
                                    **extra_popen_args)
    else:
        env = encoding.EncodeEnv(env)
        task = subprocess.Popen(cmd,
                                env=env,
                                cwd=package_root,
                                **extra_popen_args)
        atexit.register(execution_utils.KillSubprocess, task)
        return task
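MakeProcess hands the cluster layout to the trainer through the TF_CONFIG environment variable and otherwise just launches python -m <module>. A trimmed sketch of that wiring (module name, cluster, and task fields are placeholders):

import json
import os
import shutil
import subprocess

def launch_trainer(module_name, package_root, cluster=None, task_type=None,
                   index=None, args=None):
    python_executable = shutil.which('python')
    if not python_executable:
        raise RuntimeError('No python interpreter found on local machine')
    env = os.environ.copy()
    # TF_CONFIG carries the cluster spec; module-specific args go on the command line.
    env['TF_CONFIG'] = json.dumps({
        'job': {'job_name': module_name, 'args': args or []},
        'task': {'type': task_type, 'index': index} if cluster else {},
        'cluster': cluster or {},
        'environment': 'cloud',
    })
    return subprocess.Popen(
        [python_executable, '-m', module_name] + (args or []),
        env=env, cwd=package_root)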