def ensure_namespace_exists(ns):
    """Create the k8s namespace `ns` if it does not already exist.

    Probes with `kubectl get namespace`, discarding all output; a
    nonzero exit code means the namespace is missing, so create it.

    :param ns: name of the kubernetes namespace to ensure
    """
    with open(os.devnull, 'wb') as devnull:
        exit_code = call(["kubectl", "get", "namespace", ns],
                         stdout=devnull, stderr=devnull)
        # BUG FIX: original used `is not 0`, an identity (not value)
        # comparison that only worked via CPython small-int interning.
        if exit_code != 0:
            process_helpers.run(["kubectl", "create", "namespace", ns])
def _undeploy_jobs(self, namespace, jobs, all_jobs=False): """undeploy the jobs passed to us jobs: 1 or more jobs to undeploy NOTE: right now there's no case in which some template has both custom and not custom jobs because we check for custom job by if there's a Makefile in the top level of the project """ # simplify logic by `looping` over all jobs even if there's just 1 if not isinstance(jobs, list): jobs = [jobs] # custom jobs require looping over all of them and calling # `make undeploy` on each job recursive_delete = False if files.is_custom('undeploy:') else True if recursive_delete: process_helpers.run([ "kubectl", "--namespace", namespace, "delete", "-f", "k8s", "--recursive" ], raise_on_failure=True) # TODO: have this not be in a loop for job in jobs: self.remove_job_dir(os.path.join('k8s', job)) else: for job in jobs: self._custom_undeploy(job) self.remove_job_dir(os.path.join('k8s', job))
def _init_git_repo(self): """ Initialize new git repo in the project dir and commit initial state. """ process_helpers.run(["git", "init", self.app_name]) process_helpers.run(["git", "add", "."], cwd=self.app_name) print( process_helpers.run(["git", "commit", "-m", "Initial commit."], cwd=self.app_name)) if os.path.exists(os.path.join(self.app_name, ".stignore")): msg = "Once your application is built and deployed try the "\ "following mlt commands:\n"\ "{} - to setup syncing local changes to the running pods "\ "which in turn restarts the containers every time changes "\ "are made to local template code.\n"\ "This command only needs to run once.\n"\ "{} - to wake up the 'sync' agent after a local system "\ "reboot or long inactivity (default is 1 hour) or any other"\ " activity that causes 'sync' agent to die.\n"\ "{} - to teardown syncing setup and stop syncing local "\ "changes with remote pods.\n"\ "This command only need to run once.\n\n"\ "To ignore files and folders from syncing, add them to "\ "{} file." print( msg.format(colored("mlt sync create", attrs=['bold']), colored("mlt sync reload", attrs=['bold']), colored("mlt sync delete", attrs=['bold']), colored(".stignore", attrs=['bold'])))
def _apply_template(self, out, filename): """take k8s-template data and create deployment in k8s dir""" with open(os.path.join('k8s', filename), 'w') as f: f.write(out) process_helpers.run([ "kubectl", "--namespace", self.namespace, "apply", "-R", "-f", "k8s" ])
def _init_git_repo(self): """ Initialize new git repo in the project dir and commit initial state. """ process_helpers.run(["git", "init", self.app_name]) process_helpers.run(["git", "add", "."], cwd=self.app_name) print( process_helpers.run(["git", "commit", "-m", "Initial commit."], cwd=self.app_name))
def _apply_template(self, out, filename, app_name, app_run_id): """take k8s-template data and create deployment in k8s dir""" job_sub_dir = self._track_deployed_job(app_name, app_run_id) with open(os.path.join(job_sub_dir, filename), 'w') as f: f.write(out) process_helpers.run([ "kubectl", "--namespace", self.namespace, "apply", "-R", "-f", job_sub_dir ])
def _undeploy_job(self, namespace, job_name): """undeploy the given job name""" job_dir = "k8s/{}".format(job_name) if files.is_custom('undeploy:'): self._custom_undeploy(job_name) else: process_helpers.run( ["kubectl", "--namespace", namespace, "delete", "-f", job_dir, "--recursive"], raise_on_failure=True) self.remove_job_dir(job_dir)
def test_run_error(check_output):
    """There was a bad command made, therefore no output"""
    check_output.side_effect = CalledProcessError(
        returncode=2, cmd='Bad Command!')
    with catch_stdout() as captured:
        with pytest.raises(SystemExit):
            run('ls')
        printed = captured.getvalue().strip()
    # since we're mocking CalledProcessError call, not sure we can simulate
    # exception raised by actual check_output call, so e.output is None
    assert printed == 'None'
def _apply_template(self, out, filename, app_name, app_run_id): """take k8s-template data and create deployment in k8s dir job_sub_dir will be used in case of a mlt deploy -l to pass in the most current job being deployed to tail just in case there are > 1 jobs that exist since mlt logs requires --job-name if > 1 job """ self.job_sub_dir = self._track_deployed_job(app_name, app_run_id) with open(os.path.join(self.job_sub_dir, filename), 'w') as f: f.write(out) process_helpers.run( ["kubectl", "--namespace", self.namespace, "apply", "-R", "-f", self.job_sub_dir])
def test_run_cwd(check_output):
    """Assert a command was called with /tmp as working dir
       This command should return the value of `foo`
    """
    expected = 'foo'
    check_output.return_value.decode.return_value = expected
    assert run('ls', '/tmp') == expected
def test_run_no_cwd(check_output):
    """Assert a command was called with no current working dir
       This command should return the value of `bar`
    """
    expected = 'bar'
    check_output.return_value.decode.return_value = expected
    assert run('ls') == expected
def init(self, template='hello-world'): p = Popen([ 'mlt', 'init', '--registry={}'.format( self.registry), '--template-repo={}'.format( basedir()), '--namespace={}'.format(self.namespace), '--template={}'.format(template), self.app_name ], cwd=self.workdir) # keep track of template we created so we can check if it's a TFJob # that terminates pods after completion so we need to check the crd # for status on if job was successful self.template = template assert p.wait() == 0 assert os.path.isfile(self.mlt_json) with open(self.mlt_json) as f: standard_configs = { 'namespace': self.namespace, 'name': self.app_name, 'registry': self.registry } actual_configs = json.loads((f.read())) assert dict(actual_configs, **standard_configs) == actual_configs # verify we created a git repo with our project init assert "On branch master" in run( "git --git-dir={}/.git --work-tree={} status".format( self.project_dir, self.project_dir).split())
def _apply_template(self, out, filename): """take k8s-template data and create deployment in k8s dir job_sub_dir will be used in case of a mlt deploy -l to pass in the most current job being deployed to tail just in case there are > 1 jobs that exist since mlt logs requires --job-name if > 1 job """ with open(os.path.join(self.job_sub_dir, filename), 'w') as f: f.write(out) try: process_helpers.run( ["kubectl", "--namespace", self.namespace, "apply", "-R", "-f", self.job_sub_dir]) except SystemExit: # if anything goes wrong with the deploy delete job subdir shutil.rmtree(self.job_sub_dir) raise
def get_latest_sha(repo):
    """Return the latest git sha of the given git repo directory.

    :param repo: path to a git repo (or a path inside one) to query
    :returns: the newest commit sha touching `repo`, stripped of whitespace
    """
    cwd = os.getcwd()
    command = "git rev-list -1 HEAD -- {0}".format(repo)
    os.chdir(repo)
    try:
        git_sha = process_helpers.run(command.split(" "))
    finally:
        # BUG FIX: always restore the caller's working directory, even if
        # the git invocation raises — previously an exception left the
        # process stranded inside `repo`.
        os.chdir(cwd)
    return git_sha.strip()
    def teardown(self):
        """Allow normal deployment, then undeploy and check status at
        end of every test. Also delete the namespace because undeploy
        doesn't do that.

        NOTE: this is a generator-style fixture — the `yield` hands
        control to the test body; everything after runs as cleanup.
        """
        try:
            # normal test execution
            yield
        finally:
            # no matter what, undeploy and check status
            try:
                self.undeploy_for_test_teardown()
                self.status()
            finally:
                # tear down namespace at end of test
                try:
                    run(["kubectl", "delete", "ns", self.namespace])
                except SystemExit:
                    # this means that the namespace and k8s resources are
                    # already deleted or were never created
                    pass
def _undeploy_jobs(self, namespace, jobs, all_jobs=False): """undeploy the jobs passed to us jobs: 1 or more jobs to undeploy NOTE: right now there's no case in which some template has both custom and not custom jobs because we check for custom job by if there's a Makefile in the top level of the project """ # simplify logic by `looping` over all jobs even if there's just 1 if not isinstance(jobs, list): jobs = [jobs] # custom jobs require looping over all of them and calling # `make undeploy` on each job recursive_delete = False if files.is_custom('undeploy:') else True if recursive_delete: folder_to_delete = 'k8s' if not all_jobs: # only way all_jobs won't be false is if there's # a --job-name flag passed or there's only 1 job to undeploy if len(jobs) != 1: error_handling.throw_error( "There should be only 1 job to undeploy, " "something went wrong. Please file a bug on " "https://github.com/IntelAI/mlt") folder_to_delete = os.path.join(folder_to_delete, jobs[0]) process_helpers.run([ "kubectl", "--namespace", namespace, "delete", "-f", folder_to_delete, "--recursive" ], raise_on_failure=True) # TODO: have this not be in a loop for job in jobs: self.remove_job_dir(os.path.join('k8s', job)) else: for job in jobs: self._custom_undeploy(job, namespace) self.remove_job_dir(os.path.join('k8s', job))
def init(self, template='hello-world', template_repo=basedir(), enable_sync=False, existing_app_dir=None): self._set_new_mlt_project_vars(template) # If there isn't an existing app, run mlt init to create a new one if not existing_app_dir: init_options = [ 'mlt', 'init', '--registry={}'.format(self.registry), '--template-repo={}'.format(template_repo), '--namespace={}'.format(self.namespace), '--template={}'.format(template), self.app_name ] if enable_sync: init_options.append('--enable-sync') self._launch_popen_call(init_options, cwd=pytest.workdir) else: shutil.copytree(existing_app_dir, self.project_dir) self.config("set", "namespace", self.namespace) self.config("set", "name", self.app_name) self.config("set", "registry", self.registry) # keep track of template we created so we can check if it's a TFJob # that terminates pods after completion so we need to check the crd # for status on if job was successful self.template = template assert os.path.isfile(self.mlt_json) with open(self.mlt_json) as f: standard_configs = { 'namespace': self.namespace, 'name': self.app_name, 'registry': self.registry } actual_configs = json.loads((f.read())) assert dict(actual_configs, **standard_configs) == actual_configs if not existing_app_dir: # verify we created a git repo with our project init assert "On branch master" in run( "git --git-dir={}/.git --work-tree={} status".format( self.project_dir, self.project_dir).split()) # setup additional namespace configs for experiments if template == 'experiments': self._setup_experiments_sa()
def init(self, template='hello-world'): p = Popen( ['mlt', 'init', '--registry={}'.format(self.registry), '--template-repo={}'.format(basedir()), '--namespace={}'.format(self.namespace), '--template={}'.format(template), self.app_name], cwd=self.workdir) assert p.wait() == 0 assert os.path.isfile(self.mlt_json) with open(self.mlt_json) as f: standard_configs = { 'namespace': self.namespace, 'name': self.app_name, 'registry': self.registry } actual_configs = json.loads((f.read())) assert dict(actual_configs, **standard_configs) == actual_configs # verify we created a git repo with our project init assert "On branch master" in run( "git --git-dir={}/.git --work-tree={} status".format( self.project_dir, self.project_dir).split())
def _tag(self): process_helpers.run( ["docker", "tag", self.container_name, self.remote_container_name])
    def action(self):
        """Update the template instance with new template version if
        template is updated.

        Flow: back up the current project, check out the template repo at
        the sha the project was created from, overlay the project on top,
        then `git pull` the latest template changes so git performs the
        merge. Merge conflicts are left for the user to resolve.
        """
        if "template_name" not in self.config or \
                "template_git_sha" not in self.config:
            print("ERROR: mlt.json does not have either template_name "
                  "or template_git_sha. Template update is not possible.")
            return

        app_name = self.config["name"]
        template_name = self.config["template_name"]
        current_template_git_sha = self.config["template_git_sha"]
        orig_project_backup_dir = self._get_backup_dir_name(app_name)

        with git_helpers.clone_repo(self.template_repo) as temp_clone:
            application_dir = os.getcwd()
            clone_template_dir = os.path.join(temp_clone,
                                              constants.TEMPLATES_DIR,
                                              template_name)
            if not os.path.exists(clone_template_dir):
                print("Unable to update, template {} does "
                      "not exist in MLT git repo.".format(template_name))
                return

            latest_template_git_sha = \
                git_helpers.get_latest_sha(clone_template_dir)
            if current_template_git_sha == latest_template_git_sha:
                print("Template is up to date, no need for update.")
            else:
                print("Template is not up to date, updating template...")
                # keep a full copy of the project so the user can recover
                copy_tree(application_dir, orig_project_backup_dir)
                os.chdir(temp_clone)

                # create temp-branch using git sha from which template
                # was initiated and clean un-tracked files
                cmd = "git checkout -f {} -b temp-branch && git clean -f .". \
                    format(current_template_git_sha)
                process_helpers.run_popen(cmd, shell=True)

                # copy app dir content to temp clone template dir
                copy_tree(application_dir, clone_template_dir)

                # if there are any uncommitted changes to temp-branch,
                # commit them otherwise 'pull' from master will fail.
                output = process_helpers.run("git status".split(" "))
                if "Your branch is up-to-date" not in output:
                    process_helpers.run("git add --all ".split(" "))
                    commit_command = "git commit --message 'temp-commit'"
                    process_helpers.run(commit_command.split(" "))

                # merging latest template changes by pulling from master
                # into temp-branch
                try:
                    process_helpers.run("git pull origin master".split(" "),
                                        raise_on_failure=True)
                except CalledProcessError as e:
                    # When auto merge failed do not error out,
                    # let user review and fix conflicts
                    # for other errors exit
                    error_string = "Automatic merge failed; " \
                                   "fix conflicts and then commit the result"
                    if error_string not in e.output:
                        error_handling.throw_error(e.output)

                # copy content of clone template dir back to app dir
                copy_tree(clone_template_dir, application_dir)
                print("Latest template changes have merged using git, "
                      "please review changes for conflicts. ")
                print("Backup directory path: {}".format(
                    os.path.abspath(orig_project_backup_dir)))
                # restore the working directory we changed above
                os.chdir(application_dir)