def run(self):
    """Provision the remote data dir, upload assets, and launch the job script."""
    data_dir = Path(self.job.data_dir).as_posix()
    logger.debug(f"create remote data dir {data_dir}")

    # Ensure the remote data directory exists before uploading anything.
    with ssh_utils.sftp_client(**self.connections) as sftp_client:
        logger.debug(f"create remote job output dir {data_dir} ")
        ssh_utils.makedirs(sftp_client, data_dir)

    logger.debug(f"prepare to upload assets to {self.machine.hostname}")
    self.prepare_assets()

    # Render the run shell script into a local temp file.
    shell_fd, shell_path = tempfile.mkstemp(
        prefix=f'hyperctl_run_{self.job.name}_', suffix='.sh')
    os.close(shell_fd)
    self._write_run_shell_script(shell_path)

    # Push the script to its remote location.
    with ssh_utils.sftp_client(**self.connections) as sftp_client:
        logger.debug(f'upload {shell_path} to {self.job.run_file}')
        ssh_utils.upload_file(sftp_client, shell_path, self.job.run_file)

    # Launch the script asynchronously; keep handles for later inspection.
    self._command_ssh_client = ssh_utils.create_ssh_client(**self.connections)
    command = f'sh {self.job.run_file}'
    logger.debug(f'execute command {command}')
    self._remote_process = self._command_ssh_client.exec_command(command, get_pty=True)
def run_upload_file(self, remote_file):
    """Upload ``self.file_a`` to *remote_file* and assert it exists remotely.

    :param remote_file: destination path on the remote host.
    """
    # NOTE: removed leftover debug prints (one printed the literal string
    # 'remote_file' instead of the value).
    with ssh_utils.sftp_client(**self.ssh_config) as client:
        ssh_utils.upload_file(client, self.file_a, remote_file)
        # check file in remote
        assert ssh_utils.exists(client, remote_file)
def test_makedirs():
    """``makedirs`` should create nested remote directories in one call."""
    ssh_config = load_ssh_psw_config()
    with ssh_utils.sftp_client(**ssh_config) as client:
        # Two random segments give a unique, not-yet-existing nested path.
        segment_a = common_util.generate_short_id()
        segment_b = common_util.generate_short_id()
        r_path = (Path("/tmp") / segment_a / segment_b).as_posix()
        print(f"made {r_path}")
        assert not ssh_utils.exists(client, r_path)
        ssh_utils.makedirs(client, r_path)
        assert ssh_utils.exists(client, r_path)
def upload_dir(self):
    """Upload ``self.data_dir`` into a freshly generated remote directory.

    Returns the remote destination directory as a POSIX path string.
    """
    with ssh_utils.sftp_client(**self.ssh_config) as client:
        # Build a unique two-level path under /tmp so test runs don't collide.
        unique_parts = [common_util.generate_short_id() for _ in range(2)]
        remote_dir = Path("/tmp").joinpath(*unique_parts).as_posix()
        ssh_utils.upload_dir(client, self.data_dir, remote_dir)
        return remote_dir
def test_makedirs():
    """Remote ``makedirs`` creates a nested, previously missing directory."""
    ssh_config = get_ssh_test_config(use_password=True, use_rsa_file=False)[0]
    with ssh_utils.sftp_client(**ssh_config) as client:
        # Unique nested path under /tmp that should not exist yet.
        nested = Path("/tmp") / common_util.generate_short_id() / common_util.generate_short_id()
        r_path = nested.as_posix()
        print(f"made {r_path}")
        assert not ssh_utils.exists(client, r_path)
        ssh_utils.makedirs(client, r_path)
        assert ssh_utils.exists(client, r_path)
def run(self):
    """Provision the remote host and launch this job's shell script over SSH."""
    # Warn when the daemon address is localhost: remote machines can't reach it.
    daemon_host = get_context().batch.daemon_conf.host
    if consts.HOST_LOCALHOST == daemon_host:
        logger.warning(
            "recommended that set IP address that can be accessed in remote machines, "
            "but now it's \"localhost\", and the task executed on the remote machines "
            "may fail because it can't get information from the daemon server,"
            " you can set it in `daemon.host` ")

    # Make sure the remote execution data directory exists.
    execution_data_dir = Path(self.job.execution.data_dir).as_posix()
    with ssh_utils.sftp_client(**self.connections) as sftp_client:
        logger.debug(f"create remote job data dir {execution_data_dir} ")
        ssh_utils.makedirs(sftp_client, execution_data_dir)

    # Render the run shell script into a local temp file.
    tmp_fd, tmp_script = tempfile.mkstemp(
        prefix=f'hyperctl_run_{self.job.name}_', suffix='.sh')
    os.close(tmp_fd)
    self._write_run_shell_script(tmp_script)

    # Copy the script to its remote path.
    with ssh_utils.sftp_client(**self.connections) as sftp_client:
        logger.debug(f'upload {tmp_script} to {self.job.run_file_path}')
        ssh_utils.copy_from_local_to_remote(sftp_client, tmp_script, self.job.run_file_path)

    # Execute asynchronously; keep client/process handles for later inspection.
    self._command_ssh_client = ssh_utils.create_ssh_client(**self.connections)
    command = f'sh {self.job.run_file_path}'
    logger.debug(f'execute command {command}')
    self._remote_process = self._command_ssh_client.exec_command(command, get_pty=True)
def test_upload():
    """``upload_file`` should create the destination (and parents) remotely."""
    ssh_config = load_ssh_psw_config()
    # generate temp file
    handle, local_path = tempfile.mkstemp()
    os.close(handle)
    # upload
    with ssh_utils.sftp_client(**ssh_config) as client:
        # Unique remote path keeping the local file's basename.
        target = (Path("/tmp")
                  / common_util.generate_short_id()
                  / common_util.generate_short_id()
                  / Path(local_path).name)
        r_path = target.as_posix()
        ssh_utils.upload_file(client, local_path, r_path)
        # check file in remote
        assert ssh_utils.exists(client, r_path)
def test_upload():
    """``copy_from_local_to_remote`` should create the remote file and parents."""
    ssh_config = get_ssh_test_config(use_password=True, use_rsa_file=False)[0]
    # generate temp file
    handle, local_path = tempfile.mkstemp()
    os.close(handle)
    # upload
    with ssh_utils.sftp_client(**ssh_config) as client:
        # Unique remote destination preserving the local basename.
        target = (Path("/tmp")
                  / common_util.generate_short_id()
                  / common_util.generate_short_id()
                  / Path(local_path).name)
        r_path = target.as_posix()
        ssh_utils.copy_from_local_to_remote(client, local_path, r_path)
        # check file in remote
        assert ssh_utils.exists(client, r_path)
def prepare_assets(self):
    """Upload the job's asset files/directories to the remote resources dir.

    Directories are uploaded recursively into ``resources_path``; single files
    go to ``resources_path/<name>``. Missing local paths are skipped with a
    warning instead of failing the whole job.
    """
    if not self.job.assets:
        return
    with ssh_utils.sftp_client(**self.connections) as sftp_client:
        # Hoist the invariant destination out of the loop.
        resources_dir = self.job.resources_path
        for asset in self.job.assets:
            asset_path = Path(asset).absolute()
            if not asset_path.exists():
                # BUG FIX: the asset may be a file OR a directory, so the old
                # "local dir ... not exists" message was misleading.
                logger.warning(
                    f"local path {asset_path} does not exist, skip to upload")
                continue
            if asset_path.is_dir():
                ssh_utils.upload_dir(sftp_client, asset_path.as_posix(),
                                     resources_dir.as_posix())
            else:
                ssh_utils.upload_file(sftp_client, asset_path.as_posix(),
                                      (resources_dir / asset_path.name).as_posix())
def test_upload_dir(self):
    """``upload_dir`` should recreate the full local tree under the remote dir.

    Removes the leftover debug ``print`` statements from the original.
    """
    remote_dir = self.upload_dir()
    with ssh_utils.sftp_client(**self.ssh_config) as client:
        assert ssh_utils.exists(client, remote_dir)
        # The uploaded tree lives under <remote_dir>/<local dir name>.
        dest = Path(remote_dir) / self.data_dir.name
        assert ssh_utils.exists(client, dest.as_posix())
        # A file, an empty dir, a nested dir and its file must all be present.
        for rel in ("a.txt", "empty_dir", "sub_dir", "sub_dir/b.txt"):
            assert ssh_utils.exists(client, (dest / rel).as_posix())
def test_run_batch(self):
    """End-to-end: run a batch over SSH and verify assets reached the remote."""
    app = self.app
    batch = app.batch
    app.start()
    job_scheduler = app.job_scheduler
    assert isinstance(job_scheduler.executor_manager, RemoteSSHExecutorManager)
    assert len(job_scheduler.executor_manager.machines) == 1
    assert_batch_finished(batch, ShellJob.STATUS_SUCCEED)
    # check assets in remote
    job1_data_dir_path = Path(batch.jobs[0].data_dir)
    with ssh_utils.sftp_client(**self.ssh_config) as client:
        remote_assert_path = job1_data_dir_path / "resources" / self.data_dir.name
        # BUG FIX: the original called exists() without asserting the result,
        # so missing remote assets could never fail the test.
        assert ssh_utils.exists(client, (remote_assert_path / "empty_dir").as_posix())
        assert ssh_utils.exists(client, (remote_assert_path / "a.txt").as_posix())
        assert ssh_utils.exists(client, (remote_assert_path / "sub_dir" / "b.txt").as_posix())