Example #1
  def get_pid(self, unique_id, configs=None):
    """Gets the pid of the process with `unique_id`.  If the deployer does not know of a process
    with `unique_id` then it should return a value of constants.PROCESS_NOT_RUNNING_PID
    """
    RECV_BLOCK_SIZE = 16
    # the following is necessary to set the configs for this function as the combination of the
    # default configurations and the parameter, with the parameter superseding the defaults but
    # not modifying the defaults
    if configs is None:
      configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp

    if unique_id in self.processes:
      hostname = self.processes[unique_id].hostname
    else:
      return constants.PROCESS_NOT_RUNNING_PID

    if self.processes[unique_id].start_command is None:
      return constants.PROCESS_NOT_RUNNING_PID

    if self.processes[unique_id].pid_file is not None:
      with open_remote_file(hostname, self.processes[unique_id].pid_file,
                            username=runtime.get_username(), password=runtime.get_password()) as pid_file:
        full_output = pid_file.read()
    elif 'pid_file' in configs:
      with open_remote_file(hostname, configs['pid_file'],
                            username=runtime.get_username(), password=runtime.get_password()) as pid_file:
        full_output = pid_file.read()
    else:
      pid_keyword = self.processes[unique_id].start_command
      if self.processes[unique_id].args is not None:
        pid_keyword = "{0} {1}".format(pid_keyword, ' '.join(self.processes[unique_id].args))
      pid_keyword = configs.get('pid_keyword', pid_keyword)
      # TODO(jehrlich): come up with a simpler approach to this
      pid_command = "ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'".format(pid_keyword)
      pid_command = configs.get('pid_command', pid_command)
      non_failing_command = "{0}; if [ $? -le 1 ]; then true;  else false; fi;".format(pid_command)
      env = configs.get("env", {})
      with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        chan = exec_with_env(ssh, non_failing_command, msg="Failed to get PID", env=env)
      output = chan.recv(RECV_BLOCK_SIZE)
      full_output = output
      while len(output) > 0:
        output = chan.recv(RECV_BLOCK_SIZE)
        full_output += output
    if len(full_output) > 0:
      pids = [int(pid_str) for pid_str in full_output.split('\n') if pid_str.isdigit()]
      if len(pids) > 0:
        return pids

    return constants.PROCESS_NOT_RUNNING_PID
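A minimal usage sketch for get_pid (not part of the original source): it assumes an SSHDeployer instance named deployer already exists, as in the later examples, and that "my_service" and the pid_file path are placeholder values.

from zopkio import constants

# assumes `deployer` was built elsewhere, e.g. adhoc_deployer.SSHDeployer("my_service", {...})
pids = deployer.get_pid("my_service", configs={"pid_file": "/tmp/my_service.pid"})  # hypothetical path
if pids == constants.PROCESS_NOT_RUNNING_PID:
  print("my_service is not running")
else:
  print("my_service pids: {0}".format(pids))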
Example #2
    def fetch_logs_from_host(hostname, install_path, prefix, logs, directory,
                             pattern):
        """ Static method Copies logs from specified host on the specified install path

    :Parameter hostname the remote host from where we need to fetch the logs
    :Parameter install_path path where the app is installed
    :Parameter prefix prefix used when copying logs. Generally the unique_id of the process
    :Parameter logs a list of logs given by absolute path from the remote host
    :Parameter directory the local directory to store the copied logs
    :Parameter pattern a pattern to apply to files to restrict the set of logs copied
    """
        if hostname is not None:
            with get_sftp_client(hostname,
                                 username=runtime.get_username(),
                                 password=runtime.get_password()) as ftp:
                for f in logs:
                    try:
                        mode = ftp.stat(f).st_mode
                    except IOError as e:
                        if e.errno == errno.ENOENT:
                            logger.error("Log file " + f +
                                         " does not exist on " + hostname)
                            pass
                    else:
                        copy_dir(ftp, f, directory, prefix)
                if install_path is not None:
                    copy_dir(ftp, install_path, directory, prefix, pattern)
Example #3
  def start(self, unique_id, configs=None):
    """
    Start the service.  If `unique_id` has already been installed the deployer will start the service on that host.
    Otherwise this will call install with the configs. Within the context of this function, only four configs are
    considered
    'start_command': the command to run (if provided will replace the default)
    'args': a list of args that can be passed to the command
    'sync': whether the command is synchronous or asynchronous; defaults to asynchronous
    'delay': a delay in seconds that might be needed regardless of whether the command returns before the service can
    be started

    :param unique_id:
    :param configs:
    :return: if the command is executed synchronously, return the underlying paramiko channel, which can be used to get the stdout;
    otherwise return the triple stdin, stdout, stderr
    """
    # the following is necessary to set the configs for this function as the combination of the
    # default configurations and the parameter, with the parameter superseding the defaults but
    # not modifying the defaults
    if configs is None:
      configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp

    logger.debug("starting " + unique_id)

    # do not start if already started
    if self.get_pid(unique_id, configs) != constants.PROCESS_NOT_RUNNING_PID:
      return None

    if unique_id not in self.processes:
      self.install(unique_id, configs)

    hostname = self.processes[unique_id].hostname
    install_path = self.processes[unique_id].install_path

    # order of precedence for start_command and args from highest to lowest:
    # 1. configs
    # 2. from Process
    # 3. from Deployer
    start_command = configs.get('start_command') or self.processes[unique_id].start_command or self.default_configs.get('start_command')
    if start_command is None:
      logger.error("start_command was not provided for unique_id: " + unique_id)
      raise DeploymentError("start_command was not provided for unique_id: " + unique_id)
    args = configs.get('args') or self.processes[unique_id].args or self.default_configs.get('args')
    if args is not None:
      full_start_command = "{0} {1}".format(start_command, ' '.join(args))
    else:
      full_start_command = start_command
    command = "cd {0}; {1}".format(install_path, full_start_command)
    env = configs.get("env", {})
    with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
      exec_with_env(ssh, command, msg="Failed to start", env=env, sync=configs.get('sync', False))

    self.processes[unique_id].start_command = start_command
    self.processes[unique_id].args = args

    if 'delay' in configs:
      time.sleep(configs['delay'])
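A hedged usage sketch for start(), exercising the four configs named in the docstring; the deployer instance, service name, and command are illustrative placeholders only.

# illustrative only; assumes `deployer` is an existing SSHDeployer and "my_service" was installed
deployer.start("my_service", configs={
  "start_command": "./bin/run-service.sh",  # hypothetical command, replaces the default
  "args": ["--port", "8080"],
  "sync": False,                            # run asynchronously (the default)
  "delay": 5,                               # sleep 5 seconds after issuing the command
})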
Example #4
  def uninstall(self, unique_id, configs=None):
    """uninstall the service.  If the deployer has not started a service with
    `unique_id` this will raise a DeploymentError.  This considers one config:
    'additional_directories': a list of directories to remove in addition to those provided in the constructor plus
     the install path. This will update the directories to remove but does not override it
    :param unique_id:
    :param configs:
    :return:
    """
    # the following is necessary to set the configs for this function as the combination of the
    # default configurations and the parameter, with the parameter superseding the defaults but
    # not modifying the defaults
    if configs is None:
      configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp

    if unique_id in self.processes:
      hostname = self.processes[unique_id].hostname
    else:
      logger.error("Can't uninstall {0}: process not known".format(unique_id))
      raise DeploymentError("Can't uninstall {0}: process not known".format(unique_id))

    install_path = self.processes[unique_id].install_path
    # copy the default list so that repeated calls do not mutate the deployer's defaults
    directories_to_remove = list(self.default_configs.get('directories_to_clean', []))
    directories_to_remove.extend(configs.get('additional_directories', []))
    if install_path not in directories_to_remove:
      directories_to_remove.append(install_path)
    with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
      for directory_to_remove in directories_to_remove:
        log_output(better_exec_command(ssh, "rm -rf {0}".format(directory_to_remove),
                                       "Failed to remove {0}".format(directory_to_remove)))
Example #5
File: deployer.py Project: pubnub/Zopkio
    def get_logs(self, unique_id, logs, directory, pattern='^$'):
        """ Copies logs from the remote host that the process is running on to the provided directory

    :Parameter unique_id the unique_id of the process in question
    :Parameter logs a list of logs given by absolute path from the remote host
    :Parameter directory the local directory to store the copied logs
    :Parameter pattern a pattern to apply to files to restrict the set of logs copied
    """
        hostname = self.processes[unique_id].hostname
        install_path = self.processes[unique_id].install_path
        if hostname is not None:
            with get_sftp_client(hostname,
                                 username=runtime.get_username(),
                                 password=runtime.get_password()) as ftp:
                for f in logs:
                    try:
                        mode = ftp.stat(f).st_mode
                    except IOError as e:
                        if e.errno == errno.ENOENT:
                            logger.error("Log file " + f +
                                         " does not exist on " + hostname)
                            pass
                    else:
                        copy_dir(ftp, f, directory, unique_id)
                if install_path is not None:
                    copy_dir(ftp, install_path, directory, unique_id, pattern)
Example #6
 def __init__(self, host_name, processor_id):
     """
     :param host_name: Represents the host name in which this StreamProcessor will run.
     :param processor_id: Represents the processor_id of StreamProcessor.
     """
     start_cmd = 'export SAMZA_LOG_DIR=\"deploy/{0}\"; export JAVA_OPTS=\"$JAVA_OPTS -Xmx2G\"; ./bin/run-class.sh  org.apache.samza.test.integration.LocalApplicationRunnerMain --config-path ./config/standalone.failure.test.properties --operation run --config processor.id={0} >> /tmp/{0}.log &'
     self.username = runtime.get_username()
     self.password = runtime.get_password()
     self.processor_id = processor_id
     self.host_name = host_name
     self.processor_start_command = start_cmd.format(self.processor_id)
     logger.info('Running processor start command: {0}'.format(
         self.processor_start_command))
     self.deployment_config = {
         'install_path':
         os.path.join(runtime.get_active_config('remote_install_path'),
                      'deploy/{0}'.format(self.processor_id)),
         'executable':
         'samza-test_2.11-1.0.0.tgz',
         'post_install_cmds': [],
         'start_command':
         self.processor_start_command,
         'stop_command':
         '',
         'extract':
         True,
         'sync':
         True,
     }
     self.deployer = adhoc_deployer.SSHDeployer(self.processor_id,
                                                self.deployment_config)
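A hedged sketch of driving the deployer built in this constructor; the host name is a placeholder, and the install/start calls follow the SSHDeployer behavior shown in the other examples rather than any method of StreamProcessor itself.

processor = StreamProcessor("localhost", "processor-1")   # placeholder host and id
# install() requires 'hostname' in configs when the unique_id is not yet known to the deployer
processor.deployer.install(processor.processor_id, {"hostname": processor.host_name})
processor.deployer.start(processor.processor_id, {"sync": True})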
Example #7
  def resume(self, unique_id, configs=None):
    """ Issues a sigcont for the specified process

    :Parameter unique_id: the name of the process
    """
    pids = self.get_pid(unique_id, configs)
    if pids != constants.PROCESS_NOT_RUNNING_PID:
      pid_str = ' '.join(str(pid) for pid in pids)
      hostname = self.processes[unique_id].hostname
      with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        better_exec_command(ssh, "kill -SIGCONT {0}".format(pid_str), "RESUMING PROCESS {0}".format(unique_id))
Example #8
File: deployer.py Project: arpras/Zopkio
  def _send_signal(self, unique_id, signalno, configs):
    """ Issues a signal for the specified process

    :Parameter unique_id: the name of the process
    """
    pids = self.get_pid(unique_id, configs)
    if pids != constants.PROCESS_NOT_RUNNING_PID:
      pid_str = ' '.join(str(pid) for pid in pids)
      hostname = self.processes[unique_id].hostname
      msg = Deployer._signalnames.get(signalno, "SENDING SIGNAL %s TO" % signalno)
      with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        better_exec_command(ssh, "kill -{0} {1}".format(signalno, pid_str), "{0} PROCESS {1}".format(msg, unique_id))
Example #9
    def stop(self, unique_id, configs=None):
        """Stop the service.  If the deployer has not started a service with`unique_id` the deployer will raise an Exception
    There are two configs that will be considered:
    'terminate_only': if this config is passed in then this method is the same as terminate(unique_id) (this is also the
    behavior if stop_command is None and not overridden)
    'stop_command': overrides the default stop_command

    :param unique_id:
    :param configs:
    :return:
    """
        # the following is necessary to set the configs for this function as the combination of the
        # default configurations and the parameter, with the parameter superseding the defaults but
        # not modifying the defaults
        if configs is None:
            configs = {}
        tmp = self.default_configs.copy()
        tmp.update(configs)
        configs = tmp

        logger.debug("stopping " + unique_id)

        if unique_id in self.processes:
            hostname = self.processes[unique_id].hostname
        else:
            logger.error("Can't stop {0}: process not known".format(unique_id))
            raise DeploymentError(
                "Can't stop {0}: process not known".format(unique_id))

        if configs.get('terminate_only', False):
            self.terminate(unique_id, configs)
        else:
            stop_command = configs.get(
                'stop_command') or self.default_configs.get('stop_command')
            env = configs.get("env", {})
            if stop_command is not None:
                install_path = self.processes[unique_id].install_path
                with get_ssh_client(hostname,
                                    username=runtime.get_username(),
                                    password=runtime.get_password()) as ssh:
                    log_output(
                        exec_with_env(
                            ssh,
                            "cd {0}; {1}".format(install_path, stop_command),
                            msg="Failed to stop {0}".format(unique_id),
                            env=env))
            else:
                self.terminate(unique_id, configs)

        if 'delay' in configs:
            time.sleep(configs['delay'])
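A hedged sketch of the two configs this method considers; the stop script name is a placeholder.

# graceful stop via an explicit stop_command (placeholder script), then wait two seconds
deployer.stop("my_service", configs={"stop_command": "./bin/stop-service.sh", "delay": 2})

# or skip the stop_command entirely and just terminate the process
deployer.stop("my_service", configs={"terminate_only": True})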
Example #10
  def kill(self, unique_id, configs=None):
    """ Issues a kill -9 to the specified process
    Calls the deployer's get_pid function for the process. If no pid_file/pid_keyword is specified,
    a generic grep of the ps aux output is executed on the remote machine based on the process parameters,
    which may not be reliable if more processes are running with similar names.

    :Parameter unique_id: the name of the process
    """
    pids = self.get_pid(unique_id, configs)
    if pids != constants.PROCESS_NOT_RUNNING_PID:
      pid_str = ' '.join(str(pid) for pid in pids)
      hostname = self.processes[unique_id].hostname
      with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        better_exec_command(ssh, "kill -9 {0}".format(pid_str), "KILLING PROCESS {0}".format(unique_id))
Example #11
    def pause(self, unique_id, configs=None):
        """ Issues a sigstop for the specified process

    :Parameter unique_id: the name of the process
    """
        pids = self.get_pid(unique_id, configs)
        if pids != constants.PROCESS_NOT_RUNNING_PID:
            pid_str = ' '.join(str(pid) for pid in pids)
            hostname = self.processes[unique_id].hostname
            with get_ssh_client(hostname,
                                username=runtime.get_username(),
                                password=runtime.get_password()) as ssh:
                better_exec_command(ssh, "kill -SIGSTOP {0}".format(pid_str),
                                    "PAUSING PROCESS {0}".format(unique_id))
Example #12
  def __init__(self, configs={}):
    """
    Instantiates a Samza job deployer that uses run-job.sh and kill-yarn-job.sh 
    to start and stop Samza jobs in a YARN grid.

    param: configs -- Map of config key/value pairs. These configs will be used
    as a default whenever overrides are not provided in the methods (install, 
    start, stop, etc) below.
    """
    logging.getLogger("paramiko").setLevel(logging.ERROR)
    # map from job_id to app_id
    self.username = runtime.get_username()
    self.password = runtime.get_password()
    self.app_ids = {}
    self.default_configs = configs
    Deployer.__init__(self)
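Note that configs={} is the classic mutable-default-argument pitfall: the same dict object is shared by every call that omits configs, so mutations of self.default_configs can leak between deployer instances. A common alternative, shown only as a sketch and not as the project's actual code:

  def __init__(self, configs=None):
    # build a fresh dict per instance so callers cannot share state by accident
    self.default_configs = dict(configs) if configs else {}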
Example #14
    def _send_signal(self, unique_id, signalno, configs):
        """ Issues a signal for the specified process

    :Parameter unique_id: the name of the process
    """
        pids = self.get_pid(unique_id, configs)
        if pids != constants.PROCESS_NOT_RUNNING_PID:
            pid_str = ' '.join(str(pid) for pid in pids)
            hostname = self.processes[unique_id].hostname
            msg = Deployer._signalnames.get(signalno,
                                            "SENDING SIGNAL %s TO" % signalno)
            with get_ssh_client(hostname,
                                username=runtime.get_username(),
                                password=runtime.get_password()) as ssh:
                better_exec_command(ssh,
                                    "kill -{0} {1}".format(signalno, pid_str),
                                    "{0} PROCESS {1}".format(msg, unique_id))
Example #15
File: deployer.py Project: pubnub/Zopkio
    def kill(self, unique_id, configs=None):
        """ Issues a kill -9 to the specified process
    Calls the deployer's get_pid function for the process. If no pid_file/pid_keyword is specified,
    a generic grep of the ps aux output is executed on the remote machine based on the process parameters,
    which may not be reliable if more processes are running with similar names.

    :Parameter unique_id: the name of the process
    """
        pids = self.get_pid(unique_id, configs)
        if pids != constants.PROCESS_NOT_RUNNING_PID:
            pid_str = ' '.join(str(pid) for pid in pids)
            hostname = self.processes[unique_id].hostname
            with get_ssh_client(hostname,
                                username=runtime.get_username(),
                                password=runtime.get_password()) as ssh:
                better_exec_command(ssh, "kill -9 {0}".format(pid_str),
                                    "KILLING PROCESS {0}".format(unique_id))
Example #16
  def stop(self, unique_id, configs=None):
    """Stop the service.  If the deployer has not started a service with`unique_id` the deployer will raise an Exception
    There are two configs that will be considered:
    'terminate_only': if this config is passed in then this method is the same as terminate(unique_id) (this is also the
    behavior if stop_command is None and not overridden)
    'stop_command': overrides the default stop_command

    :param unique_id:
    :param configs:
    :return:
    """
    # the following is necessary to set the configs for this function as the combination of the
    # default configurations and the parameter, with the parameter superseding the defaults but
    # not modifying the defaults
    if configs is None:
      configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp

    logger.debug("stopping " + unique_id)

    if unique_id in self.processes:
      hostname = self.processes[unique_id].hostname
    else:
      logger.error("Can't stop {0}: process not known".format(unique_id))
      raise DeploymentError("Can't stop {0}: process not known".format(unique_id))

    if configs.get('terminate_only', False):
      self.terminate(unique_id, configs)
    else:
      stop_command = configs.get('stop_command') or self.default_configs.get('stop_command')
      env = configs.get("env", {})
      if stop_command is not None:
        install_path = self.processes[unique_id].install_path
        with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
          log_output(exec_with_env(ssh, "cd {0}; {1}".format(install_path, stop_command),
                                         msg="Failed to stop {0}".format(unique_id), env=env))
      else:
        self.terminate(unique_id, configs)

    if 'delay' in configs:
      time.sleep(configs['delay'])
Example #17
    def uninstall(self, unique_id, configs=None):
        """uninstall the service.  If the deployer has not started a service with
    `unique_id` this will raise a DeploymentError.  This considers one config:
    'additional_directories': a list of directories to remove in addition to those provided in the constructor plus
     the install path. This will update the directories to remove but does not override it
    :param unique_id:
    :param configs:
    :return:
    """
        # the following is necessary to set the configs for this function as the combination of the
        # default configurations and the parameter, with the parameter superseding the defaults but
        # not modifying the defaults
        if configs is None:
            configs = {}
        tmp = self.default_configs.copy()
        tmp.update(configs)
        configs = tmp

        if unique_id in self.processes:
            hostname = self.processes[unique_id].hostname
        else:
            logger.error(
                "Can't uninstall {0}: process not known".format(unique_id))
            raise DeploymentError(
                "Can't uninstall {0}: process not known".format(unique_id))

        install_path = self.processes[unique_id].install_path
        # copy the default list so that repeated calls do not mutate the deployer's defaults
        directories_to_remove = list(
            self.default_configs.get('directories_to_clean', []))
        directories_to_remove.extend(configs.get('additional_directories', []))
        if install_path not in directories_to_remove:
            directories_to_remove.append(install_path)
        with get_ssh_client(hostname,
                            username=runtime.get_username(),
                            password=runtime.get_password()) as ssh:
            for directory_to_remove in directories_to_remove:
                log_output(
                    better_exec_command(
                        ssh, "rm -rf {0}".format(directory_to_remove),
                        "Failed to remove {0}".format(directory_to_remove)))
Example #18
  def get_logs(self, unique_id, logs, directory, pattern='^$'):
    """ Copies logs from the remote host that the process is running on to the provided directory

    :Parameter unique_id the unique_id of the process in question
    :Parameter logs a list of logs given by absolute path from the remote host
    :Parameter directory the local directory to store the copied logs
    :Parameter pattern a pattern to apply to files to restrict the set of logs copied
    """
    hostname = self.processes[unique_id].hostname
    install_path = self.processes[unique_id].install_path
    if hostname is not None:
      with get_sftp_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ftp:
        for f in logs:
          try:
            mode = ftp.stat(f).st_mode
          except IOError as e:
            if e.errno == errno.ENOENT:
              logger.error("Log file " + f + " does not exist on " + hostname)
              pass
          else:
            copy_dir(ftp, f, directory, unique_id)
        if install_path is not None:
          copy_dir(ftp, install_path, directory, unique_id, pattern)
Example #19
File: deployer.py Project: arpras/Zopkio
  def fetch_logs_from_host(self, hostname, install_path, prefix, logs, directory, pattern):
    """ Copies logs from any host on the specified install path

    :Parameter hostname the remote host from where we need to fetch the logs
    :Parameter install_path path where the app is installed
    :Parameter prefix prefix used when copying logs. Generally the unique_id of the process
    :Parameter logs a list of logs given by absolute path from the remote host
    :Parameter directory the local directory to store the copied logs
    :Parameter pattern a pattern to apply to files to restrict the set of logs copied
    """
    if hostname is not None:
      with get_sftp_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ftp:
        for f in logs:
          try:
            mode = ftp.stat(f).st_mode
          except IOError as e:
            if e.errno == errno.ENOENT:
              logger.error("Log file " + f + " does not exist on " + hostname)
              pass
          else:
            copy_dir(ftp, f, directory, prefix, pattern)
        if install_path is not None:
          copy_dir(ftp, install_path, directory, prefix, pattern)
Example #20
  def install(self, unique_id, configs=None):
    """
    Copies the executable to the remote machine under install path. Inspects the configs for the possible keys
    'hostname': the host to install on
    'install_path': the location on the remote host
    'executable': the executable to copy
    'no_copy': if this config is passed in and true then this method will not copy the executable assuming that it is
    already installed
    'post_install_cmds': an optional list of commands that should be executed on the remote machine after the
     executable has been installed. If no_copy is set to true, then the post install commands will not be run.

    If the unique_id is already installed on a different host, this will perform the cleanup action first.
    If either 'install_path' or 'executable' is provided, the new value will become the default.

    :param unique_id:
    :param configs:
    :return:
    """

    # the following is necessary to set the configs for this function as the combination of the
    # default configurations and the parameter, with the parameter superseding the defaults but
    # not modifying the defaults
    if configs is None:
      configs = {}
    tmp = self.default_configs.copy()
    tmp.update(configs)
    configs = tmp

    hostname = None
    is_tarfile = False
    is_zipfile = False
    if unique_id in self.processes and 'hostname' in configs:
      self.uninstall(unique_id, configs)
      hostname = configs['hostname']
    elif 'hostname' in configs:
      hostname = configs['hostname']
    elif unique_id not in self.processes:
      # we have not installed this unique_id before and no hostname is provided in the configs so raise an error
      raise DeploymentError("hostname was not provided for unique_id: " + unique_id)

    env = configs.get("env", {})
    install_path = configs.get('install_path') or self.default_configs.get('install_path')
    pid_file = configs.get('pid_file') or self.default_configs.get('pid_file')
    if install_path is None:
      logger.error("install_path was not provided for unique_id: " + unique_id)
      raise DeploymentError("install_path was not provided for unique_id: " + unique_id)
    if not configs.get('no_copy', False):
      with get_ssh_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ssh:
        log_output(better_exec_command(ssh, "mkdir -p {0}".format(install_path),
                                       "Failed to create path {0}".format(install_path)))
        log_output(better_exec_command(ssh, "chmod 755 {0}".format(install_path),
                                       "Failed to make path {0} writeable".format(install_path)))
        executable = configs.get('executable') or self.default_configs.get('executable')
        if executable is None:
          logger.error("executable was not provided for unique_id: " + unique_id)
          raise DeploymentError("executable was not provided for unique_id: " + unique_id)

        # if the executable is in a remote location, copy it to the local machine first
        copy_from_remote_location = False

        if (":" in executable):
          copy_from_remote_location = True

          if ("http" not in executable):
            remote_location_server = executable.split(":")[0]
            remote_file_path = executable.split(":")[1] 
            remote_file_name = os.path.basename(remote_file_path)

            local_temp_file_name = os.path.join(configs.get("tmp_dir", "/tmp"), remote_file_name)
          
            if not os.path.exists(local_temp_file_name):
              with get_sftp_client(remote_location_server,username=runtime.get_username(), password=runtime.get_password()) as ftp:
                try:
                  ftp.get(remote_file_path, local_temp_file_name)
                  executable = local_temp_file_name
                except:
                  raise DeploymentError("Unable to load file from remote server " + executable)
          #use urllib for http copy
          else:    
              remote_file_name = executable.split("/")[-1]
              local_temp_file_name = os.path.join(configs.get("tmp_dir","/tmp"),remote_file_name)
              if not os.path.exists(local_temp_file_name):
                try:
                  urllib.urlretrieve(executable, local_temp_file_name)
                except:
                  raise DeploymentError("Unable to load file from remote server " + executable)
              executable = local_temp_file_name    

        try:                     
          exec_name = os.path.basename(executable)
          install_location = os.path.join(install_path, exec_name)
          with get_sftp_client(hostname, username=runtime.get_username(), password=runtime.get_password()) as ftp:
            ftp.put(executable, install_location)
        except:
            raise DeploymentError("Unable to copy executable to install_location:" + install_location)
        finally:
          # Track whether it's a tarfile or zipfile before deleting it, in case the copy to the remote location fails
          is_tarfile = tarfile.is_tarfile(executable)
          is_zipfile = zipfile.is_zipfile(executable)
          if (copy_from_remote_location and not configs.get('cache',False)):
            os.remove(executable)       

        # only supports tar and zip (because those modules are provided by Python's standard library)
        if configs.get('extract', False) or self.default_configs.get('extract', False):
          if is_tarfile:
            log_output(better_exec_command(ssh, "tar -xf {0} -C {1}".format(install_location, install_path),
                                           "Failed to extract tarfile {0}".format(exec_name)))
          elif is_zipfile:
            log_output(better_exec_command(ssh, "unzip -o {0} -d {1}".format(install_location, install_path),
                                           "Failed to extract zipfile {0}".format(exec_name)))
          else:
            logger.error(executable + " is not a supported filetype for extracting")
            raise DeploymentError(executable + " is not a supported filetype for extracting")
        post_install_cmds = configs.get('post_install_cmds', False) or self.default_configs.get('post_install_cmds', [])
        for cmd in post_install_cmds:
          relative_cmd = "cd {0}; {1}".format(install_path, cmd)
          log_output(exec_with_env(ssh, relative_cmd,
                                         msg="Failed to execute post install command: {0}".format(relative_cmd), env=env))
    self.processes[unique_id] = Process(unique_id, self.service_name, hostname, install_path)
    self.processes[unique_id].pid_file = pid_file
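A hedged sketch of an install() call that exercises the config keys listed in the docstring; host, paths, and archive name are placeholders.

# assumes `deployer` is an existing SSHDeployer instance
deployer.install("my_service", configs={
  "hostname": "worker-1.example.com",          # placeholder host
  "install_path": "/opt/my_service",
  "executable": "/local/builds/my_service.tar.gz",
  "extract": True,                             # unpack the tarball on the remote host
  "post_install_cmds": ["chmod +x bin/*.sh"],  # run after copy/extract
})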
Example #21
    def start(self, unique_id, configs=None):
        """
    Start the service.  If `unique_id` has already been installed the deployer will start the service on that host.
    Otherwise this will call install with the configs. Within the context of this function, only four configs are
    considered
    'start_command': the command to run (if provided will replace the default)
    'args': a list of args that can be passed to the command
    'sync': whether the command is synchronous or asynchronous; defaults to asynchronous
    'delay': a delay in seconds that might be needed regardless of whether the command returns before the service can
    be started

    :param unique_id:
    :param configs:
    :return: if the command is executed synchronously, return the underlying paramiko channel, which can be used to get the stdout;
    otherwise return the triple stdin, stdout, stderr
    """
        # the following is necessary to set the configs for this function as the combination of the
        # default configurations and the parameter, with the parameter superseding the defaults but
        # not modifying the defaults
        if configs is None:
            configs = {}
        tmp = self.default_configs.copy()
        tmp.update(configs)
        configs = tmp

        logger.debug("starting " + unique_id)

        # do not start if already started
        if self.get_pid(unique_id, configs) != constants.PROCESS_NOT_RUNNING_PID:
            return None

        if unique_id not in self.processes:
            self.install(unique_id, configs)

        hostname = self.processes[unique_id].hostname
        install_path = self.processes[unique_id].install_path

        # order of precedence for start_command and args from highest to lowest:
        # 1. configs
        # 2. from Process
        # 3. from Deployer
        start_command = configs.get('start_command') or self.processes[
            unique_id].start_command or self.default_configs.get(
                'start_command')
        pid_file = configs.get('pid_file') or self.default_configs.get(
            'pid_file')
        if start_command is None:
            logger.error("start_command was not provided for unique_id: " +
                         unique_id)
            raise DeploymentError(
                "start_command was not provided for unique_id: " + unique_id)
        args = configs.get('args') or self.processes[
            unique_id].args or self.default_configs.get('args')
        if args is not None:
            full_start_command = "{0} {1}".format(start_command,
                                                  ' '.join(args))
        else:
            full_start_command = start_command
        command = "cd {0}; {1}".format(install_path, full_start_command)
        env = configs.get("env", {})
        with get_ssh_client(hostname,
                            username=runtime.get_username(),
                            password=runtime.get_password()) as ssh:
            exec_with_env(ssh,
                          command,
                          msg="Failed to start",
                          env=env,
                          sync=configs.get('sync', False))

        self.processes[unique_id].start_command = start_command
        self.processes[unique_id].args = args
        # For cases where the user passes it with the start command
        if self.processes[unique_id].pid_file is None:
            self.processes[unique_id].pid_file = pid_file

        if 'delay' in configs:
            time.sleep(configs['delay'])
Example #22
    def get_pid(self, unique_id, configs=None):
        """Gets the pid of the process with `unique_id`.  If the deployer does not know of a process
    with `unique_id` then it should return a value of constants.PROCESS_NOT_RUNNING_PID
    """
        RECV_BLOCK_SIZE = 16
        # the following is necessary to set the configs for this function as the combination of the
        # default configurations and the parameter, with the parameter superseding the defaults but
        # not modifying the defaults
        if configs is None:
            configs = {}
        tmp = self.default_configs.copy()
        tmp.update(configs)
        configs = tmp

        if unique_id in self.processes:
            hostname = self.processes[unique_id].hostname
        else:
            return constants.PROCESS_NOT_RUNNING_PID

        if self.processes[unique_id].start_command is None:
            return constants.PROCESS_NOT_RUNNING_PID

        if self.processes[unique_id].pid_file is not None:
            with open_remote_file(hostname,
                                  self.processes[unique_id].pid_file,
                                  username=runtime.get_username(),
                                  password=runtime.get_password()) as pid_file:
                full_output = pid_file.read()
        elif 'pid_file' in configs:
            with open_remote_file(hostname,
                                  configs['pid_file'],
                                  username=runtime.get_username(),
                                  password=runtime.get_password()) as pid_file:
                full_output = pid_file.read()
        else:
            pid_keyword = self.processes[unique_id].start_command
            if self.processes[unique_id].args is not None:
                pid_keyword = "{0} {1}".format(
                    pid_keyword, ' '.join(self.processes[unique_id].args))
            pid_keyword = configs.get('pid_keyword', pid_keyword)
            # TODO(jehrlich): come up with a simpler approach to this
            pid_command = "ps aux | grep '{0}' | grep -v grep | tr -s ' ' | cut -d ' ' -f 2 | grep -Eo '[0-9]+'".format(
                pid_keyword)
            pid_command = configs.get('pid_command', pid_command)
            non_failing_command = "{0}; if [ $? -le 1 ]; then true;  else false; fi;".format(
                pid_command)
            env = configs.get("env", {})
            with get_ssh_client(hostname,
                                username=runtime.get_username(),
                                password=runtime.get_password()) as ssh:
                chan = exec_with_env(ssh,
                                     non_failing_command,
                                     msg="Failed to get PID",
                                     env=env)
            output = chan.recv(RECV_BLOCK_SIZE)
            full_output = output
            while len(output) > 0:
                output = chan.recv(RECV_BLOCK_SIZE)
                full_output += output
        if len(full_output) > 0:
            pids = [
                int(pid_str) for pid_str in full_output.split('\n')
                if pid_str.isdigit()
            ]
            if len(pids) > 0:
                return pids

        return constants.PROCESS_NOT_RUNNING_PID
Example #23
    def install(self, unique_id, configs=None):
        """
    Copies the executable to the remote machine under install path. Inspects the configs for the possible keys
    'hostname': the host to install on
    'install_path': the location on the remote host
    'executable': the executable to copy
    'no_copy': if this config is passed in and true then this method will not copy the executable assuming that it is
    already installed
    'post_install_cmds': an optional list of commands that should be executed on the remote machine after the
     executable has been installed. If no_copy is set to true, then the post install commands will not be run.

    If the unique_id is already installed on a different host, this will perform the cleanup action first.
    If either 'install_path' or 'executable' is provided, the new value will become the default.

    :param unique_id:
    :param configs:
    :return:
    """

        # the following is necessary to set the configs for this function as the combination of the
        # default configurations and the parameter, with the parameter superseding the defaults but
        # not modifying the defaults
        if configs is None:
            configs = {}
        tmp = self.default_configs.copy()
        tmp.update(configs)
        configs = tmp

        hostname = None
        is_tarfile = False
        is_zipfile = False
        if unique_id in self.processes and 'hostname' in configs:
            self.uninstall(unique_id, configs)
            hostname = configs['hostname']
        elif 'hostname' in configs:
            hostname = configs['hostname']
        elif unique_id not in self.processes:
            # we have not installed this unique_id before and no hostname is provided in the configs so raise an error
            raise DeploymentError("hostname was not provided for unique_id: " +
                                  unique_id)

        env = configs.get("env", {})
        install_path = configs.get('install_path') or self.default_configs.get(
            'install_path')
        pid_file = configs.get('pid_file') or self.default_configs.get(
            'pid_file')
        if install_path is None:
            logger.error("install_path was not provided for unique_id: " +
                         unique_id)
            raise DeploymentError(
                "install_path was not provided for unique_id: " + unique_id)
        if not configs.get('no_copy', False):
            with get_ssh_client(hostname,
                                username=runtime.get_username(),
                                password=runtime.get_password()) as ssh:
                log_output(
                    better_exec_command(
                        ssh, "mkdir -p {0}".format(install_path),
                        "Failed to create path {0}".format(install_path)))
                log_output(
                    better_exec_command(
                        ssh, "chmod 755 {0}".format(install_path),
                        "Failed to make path {0} writeable".format(
                            install_path)))
                executable = configs.get(
                    'executable') or self.default_configs.get('executable')
                if executable is None:
                    logger.error(
                        "executable was not provided for unique_id: " +
                        unique_id)
                    raise DeploymentError(
                        "executable was not provided for unique_id: " +
                        unique_id)

                # if the executable is in a remote location, copy it to the local machine first
                copy_from_remote_location = False

                if (":" in executable):
                    copy_from_remote_location = True

                    if ("http" not in executable):
                        remote_location_server = executable.split(":")[0]
                        remote_file_path = executable.split(":")[1]
                        remote_file_name = os.path.basename(remote_file_path)

                        local_temp_file_name = os.path.join(
                            configs.get("tmp_dir", "/tmp"), remote_file_name)

                        if not os.path.exists(local_temp_file_name):
                            with get_sftp_client(
                                    remote_location_server,
                                    username=runtime.get_username(),
                                    password=runtime.get_password()) as ftp:
                                try:
                                    ftp.get(remote_file_path,
                                            local_temp_file_name)
                                    executable = local_temp_file_name
                                except:
                                    raise DeploymentError(
                                        "Unable to load file from remote server "
                                        + executable)
                    #use urllib for http copy
                    else:
                        remote_file_name = executable.split("/")[-1]
                        local_temp_file_name = os.path.join(
                            configs.get("tmp_dir", "/tmp"), remote_file_name)
                        if not os.path.exists(local_temp_file_name):
                            try:
                                urllib.urlretrieve(executable,
                                                   local_temp_file_name)
                            except:
                                raise DeploymentError(
                                    "Unable to load file from remote server " +
                                    executable)
                        executable = local_temp_file_name

                try:
                    exec_name = os.path.basename(executable)
                    install_location = os.path.join(install_path, exec_name)
                    with get_sftp_client(
                            hostname,
                            username=runtime.get_username(),
                            password=runtime.get_password()) as ftp:
                        ftp.put(executable, install_location)
                except:
                    raise DeploymentError(
                        "Unable to copy executable to install_location:" +
                        install_location)
                finally:
                    # Track whether it's a tarfile or zipfile before deleting it, in case the copy to the remote location fails
                    is_tarfile = tarfile.is_tarfile(executable)
                    is_zipfile = zipfile.is_zipfile(executable)
                    if (copy_from_remote_location
                            and not configs.get('cache', False)):
                        os.remove(executable)

                # only supports tar and zip (because those modules are provided by Python's standard library)
                if configs.get('extract', False) or self.default_configs.get(
                        'extract', False):
                    if is_tarfile:
                        log_output(
                            better_exec_command(
                                ssh, "tar -xf {0} -C {1}".format(
                                    install_location, install_path),
                                "Failed to extract tarfile {0}".format(
                                    exec_name)))
                    elif is_zipfile:
                        log_output(
                            better_exec_command(
                                ssh, "unzip -o {0} -d {1}".format(
                                    install_location, install_path),
                                "Failed to extract zipfile {0}".format(
                                    exec_name)))
                    else:
                        logger.error(
                            executable +
                            " is not a supported filetype for extracting")
                        raise DeploymentError(
                            executable +
                            " is not a supported filetype for extracting")
                post_install_cmds = configs.get(
                    'post_install_cmds', False) or self.default_configs.get(
                        'post_install_cmds', [])
                for cmd in post_install_cmds:
                    relative_cmd = "cd {0}; {1}".format(install_path, cmd)
                    log_output(
                        exec_with_env(
                            ssh,
                            relative_cmd,
                            msg="Failed to execute post install command: {0}".
                            format(relative_cmd),
                            env=env))
        self.processes[unique_id] = Process(unique_id, self.service_name,
                                            hostname, install_path)
        self.processes[unique_id].pid_file = pid_file