def __init__(self, service_name, configs=None):
    """
    Create a new SSHDeployer; the typical use case is one deployer per service.

    The deployer's behavior is driven by configs. Defaults may be supplied
    here and then overridden during each invocation. Supported config keys:

    additional_directories: during uninstall, extra directories to remove
        (see directories_to_clean)
    args: during start, arguments given to the start command
    delay: during start or stop, a delay before returning to allow the
        service time to start up
    directories_to_clean: during uninstall, additional directories to remove
    env: during install/start/stop/get_pid, the environment in which to run
        custom commands
    executable: the executable that defines this service
    extract: during install, whether the executable should be extracted
    hostname: the host on which to execute each function; should be passed
        per call rather than set by default
    install_path: the path at which to install the executable
    no_copy: during install, skip the installation step if the executable
        has already been copied
    pid_command: during get_pid, a command used to determine the pid of the
        executable (use this or pid_file or pid_keyword)
    pid_file: during get_pid, a file read to determine the pid of the
        executable (use this or pid_command or pid_keyword)
    pid_keyword: during get_pid, a keyword used with pgrep to determine the
        pid of the executable (use this or pid_command or pid_file)
    post_install_cmds: during install, custom commands run prior to start
    start_command: during start, the command that starts the service
    stop_command: during stop, the command that stops the service
    sync: during start, whether the start command is synchronous or not
        (default: not)
    terminate_only: during stop, terminate the process rather than using
        the stop command

    :param service_name: an arbitrary name that can be used to describe the
        executable
    :param configs: default configurations for the other methods
    :return:
    """
    # paramiko logs verbosely by default; keep only errors from the SSH layer
    logging.getLogger("paramiko").setLevel(logging.ERROR)
    if configs is None:
        configs = {}
    self.service_name = service_name
    self.default_configs = configs
    Deployer.__init__(self)
def __init__(self, service_name, configs=None):
    """
    Create a new deployer for a single service.

    :param service_name: an arbitrary name that can be used to describe the
        executable
    :param configs: default configurations for the other methods
    :return:
    """
    # paramiko logs verbosely by default; keep only errors from the SSH layer
    logging.getLogger("paramiko").setLevel(logging.ERROR)
    if configs is None:
        configs = {}
    self.service_name = service_name
    self.default_configs = configs
    Deployer.__init__(self)
def __init__(self, configs=None):
    """
    Instantiate a Samza job deployer that uses run-job.sh and kill-yarn-job.sh
    to start and stop Samza jobs in a YARN grid.

    :param configs: map of config key/value pairs. These configs are used as
        defaults whenever overrides are not provided in the methods
        (install, start, stop, etc.) below.
    """
    # paramiko logs verbosely by default; keep only errors from the SSH layer
    logging.getLogger("paramiko").setLevel(logging.ERROR)
    self.username = runtime.get_username()
    self.password = runtime.get_password()
    # map from job_id to YARN app_id
    self.app_ids = {}
    # BUG FIX: the original signature used a mutable default (configs={}),
    # which is shared across every instance created without an argument —
    # mutating one deployer's default_configs would leak into the others.
    # Use the None sentinel so each instance gets its own fresh dict.
    self.default_configs = {} if configs is None else configs
    Deployer.__init__(self)
def test_get_logs(self):
    """
    Tests that we can successfully copy logs from a remote host

    :return:
    """
    minimal_deployer = Deployer()
    install_path = '/tmp/test_deployer_get_logs'
    output_path = '/tmp/test_deployer_get_logs_output'
    for directory in (install_path, output_path):
        if not os.path.exists(directory):
            os.mkdir(directory)
    # seed three files; only a subset should be fetched by each call below
    seed_files = {
        'test.log': 'this is the test log',
        'test.out': 'this is the test out',
        'test.foo': 'this is the test foo',
    }
    for filename, body in seed_files.items():
        with open(os.path.join(install_path, filename), 'w') as f:
            f.write(body)
    minimal_deployer.processes['unique_id'] = Process(
        'unique_id', 'service_name', 'localhost', install_path)
    # explicit file list: exactly the named file is copied
    minimal_deployer.get_logs('unique_id',
                              [os.path.join(install_path, 'test.out')],
                              output_path)
    assert os.path.exists(os.path.join(output_path, "unique_id-test.out"))
    shutil.rmtree(output_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    # pattern fetch: files matching the regex are copied from the install dir
    minimal_deployer.get_logs('unique_id', [], output_path, '.*log')
    assert os.path.exists(
        os.path.join(output_path,
                     "unique_id_test_deployer_get_logs-test.log"))
    shutil.rmtree(install_path)
    shutil.rmtree(output_path)
def test_get_logs(self):
    """
    Tests that we can successfully copy logs from a remote host

    :return:
    """
    minimal_deployer = Deployer()
    install_path = "/tmp/test_deployer_get_logs"
    output_path = "/tmp/test_deployer_get_logs_output"
    if not os.path.exists(install_path):
        os.mkdir(install_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    # seed three files; only a subset should be fetched by each call below
    for filename, body in [("test.log", "this is the test log"),
                           ("test.out", "this is the test out"),
                           ("test.foo", "this is the test foo")]:
        with open(os.path.join(install_path, filename), "w") as f:
            f.write(body)
    minimal_deployer.processes["unique_id"] = Process(
        "unique_id", "service_name", "localhost", install_path)
    # explicit file list: exactly the named file is copied
    minimal_deployer.get_logs(
        "unique_id", [os.path.join(install_path, "test.out")], output_path)
    assert os.path.exists(os.path.join(output_path, "unique_id-test.out"))
    shutil.rmtree(output_path)
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    # pattern fetch: files matching the regex are copied from the install dir
    minimal_deployer.get_logs("unique_id", [], output_path, ".*log")
    assert os.path.exists(
        os.path.join(output_path,
                     "unique_id_test_deployer_get_logs-test.log"))
    shutil.rmtree(install_path)
    shutil.rmtree(output_path)