def run_commands(*cmds, sep: str = '&&'):
    lst = []
    for i, c in enumerate(cmds):
        lst.extend(c)
        if i != len(cmds) - 1:
            lst.append(sep)
    run_command(lst)
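# Illustrative usage sketch (an assumption, not part of the original source): chain two
# `opai` invocations so they run in a single shell call joined by '&&'; run_command is
# assumed to execute the flattened token list.
def _example_run_commands():
    # roughly equivalent to the shell line: opai cluster list && opai set
    run_commands(['opai', 'cluster', 'list'], ['opai', 'set'])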
def test_job(self):
    alias = self.cluster_ut["cluster-alias"]
    # cluster delete
    run_test_command("opai cluster delete", {}, alias)
    clusters = Engine().process(['cluster', 'list'])
    self.assertFalse(alias in clusters)
    run_command("opai unset cluster-alias")
    # `job submit`
    if os.path.isfile('mnist.yaml'):
        self.assertDictEqual(
            Engine().process(['job', 'submit', '--preview', 'mnist.yaml']),
            from_file('mnist.yaml'))
        print("job submit test passed")
    # `opai job sub` with incomplete args
    for k in self.job_c.keys():
        print("test sub command without flag --%s" % k)
        self.run_test_sub(self.job_c, "ls > /dev/null 2>&1", ignore_job_c=[k])
    # `opai job sub`
    self.job_c["gpu"] = 1
    rmtree(os.path.dirname(self.job_cfg_file), ignore_errors=True)
    self.run_test_sub(self.job_c, "ls", error_expected=False)
    job_config = from_file(self.job_cfg_file)
    self.assertListEqual(job_config['extras']["__clusters__"], [])
    self.assertEqual(self.job_c["gpu"], job_config["taskRoles"][0]["gpuNumber"])
    # `cluster add` + `cluster select` + `cluster attach-hdfs` + `job sub`
    rmtree(os.path.dirname(self.job_cfg_file), ignore_errors=True)
    run_commands(
        get_cmd('opai cluster add', self.cluster_ut),
        get_cmd('opai cluster select %s' % alias, {}),
        get_cmd("opai cluster attach-hdfs", self.hdfs_ut),
        get_cmd("opai job sub --preview", self.job_c, "ls"))
    defaults = Engine().process(['set'])
    self.assertEqual(defaults["cluster-alias"], alias)
    job_config = from_file(self.job_cfg_file)
    self.assertEqual(
        alias, job_config["extras"]["__clusters__"][0]["cluster_alias"])
    job_config2 = Engine().process(
        ['job', 'submit', '--preview', self.job_cfg_file])
    self.assertDictEqual(job_config, job_config2)
def submit_notebook(
        nb_file=None,    # type: str
        job_name=None,   # type: str
        extra_args=[]    # type: list
):
    """submit the current notebook to OpenPAI via `opai job fast`

    Keyword Arguments:
        nb_file {str} -- notebook path, if None, use the current notebook (default: {None})
        job_name {str} -- job name, if None, use the notebook name plus a random string (default: {None})
        extra_args {list} -- extra arguments passed through to the command line (default: {[]})

    Returns:
        str -- job name
    """
    commands = ['job', 'fast']
    if nb_file is None:
        nb_file = get_notebook_path()
    d, fname = os.path.split(nb_file)
    name = os.path.splitext(fname)[0]
    if job_name is None:
        job_name = name + '_' + uuid.uuid4().hex
    commands.extend(['-j', job_name])
    commands.extend(extra_args)
    # convert the notebook to a python script and submit that instead
    script_name = name + '.py'
    run_command(['ipython', 'nbconvert', '--to', 'script', fname], cwd=d)
    commands.extend(['-s', script_name])
    commands.extend(['ipython', script_name])
    return Engine().process(commands)
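# Illustrative usage sketch (an assumption, not part of the original source): submit a
# notebook under an explicit job name; the notebook path and job name are placeholders.
def _example_submit_notebook():
    result = submit_notebook(nb_file='mnist.ipynb', job_name='mnist-notebook-demo')
    print(result)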
def test_cluster(self):
    alias = self.cluster_ut["cluster-alias"]
    # test for command `opai cluster add` and `opai cluster list`
    run_command("opai unset cluster-alias")
    run_command("opai cluster delete %s" % alias)
    run_test_command("opai cluster add", self.cluster_ut)
    cluster_bk = Engine().process(['cluster', 'list'])[alias]
    expectedOutput = gen_expected(self.cluster_ut)
    expectedOutput.update(storages=[], default_storage_alias=None)
    self.assertDictEqual(expectedOutput, cluster_bk)
    # test for command `opai cluster attach-hdfs`
    with self.assertRaises(CalledProcessError):
        run_test_command("opai cluster attach-hdfs", self.hdfs_ut, "> /dev/null 2>&1")
    run_test_command("opai cluster attach-hdfs -a %s --default" % alias, self.hdfs_ut)
    cluster_bk = Engine().process(['cluster', 'list'])[alias]
    expectedOutput["storages"].append(
        gen_expected(self.hdfs_ut, user="******", protocol="webHDFS"))
    expectedOutput["default_storage_alias"] = self.hdfs_ut["storage-alias"]
    self.assertDictEqual(expectedOutput, cluster_bk)
def do_action_edit(self, args):
    run_command([args.editor, cluster_cfg_file])
def run_test_command(cmd: Union[str, list], flags: dict, args: Union[list, str] = None):
    run_command(get_cmd(cmd, flags, args))
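# Illustrative usage sketch (an assumption, not part of the original source): get_cmd is
# assumed to expand the flag dict into `--key value` pairs appended to `cmd`; the alias
# value below is a placeholder.
def _example_run_test_command():
    run_test_command("opai cluster add", {"cluster-alias": "local-test"})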
def do_action_edit(self, args):
    run_command([args.editor, __cluster_config_file__])
    self.disable_saving["clusters"] = True
import os
import sys

from openpaisdk.utils import run_command
from openpaisdk.io_utils import browser_open

try:
    import nbmerge
except ImportError:
    run_command([sys.executable, '-m', 'pip', 'install', 'nbmerge'])

test_notebooks = [
    '0-install-sdk-specify-openpai-cluster.ipynb',
    '1-submit-and-query-via-command-line.ipynb',
    # '2-submit-job-from-local-notebook.ipynb',
]

merged_file = "integrated_tests.ipynb"
html_file = os.path.splitext(merged_file)[0] + '.html'

# remove stale outputs from previous runs (they are files, so shutil.rmtree would not delete them)
for stale in (merged_file, html_file):
    if os.path.isfile(stale):
        os.remove(stale)

# clear output for committing
for f in test_notebooks:
    os.system(
        "jupyter nbconvert --ClearOutputPreprocessor.enabled=True --inplace %s" % f)

os.system('nbmerge %s -o %s' % (' '.join(test_notebooks), merged_file))
os.system(
    'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.allow_errors=True --to html --execute %s'
    % merged_file)