def pack_image(conf, name, image_root, push=False, force=False, tag='dev'):
    """
    Builds a docker image for a module when it is absent locally (or when
    forced), optionally pushing it to the configured gcp repository.

    :param conf: project configuration object exposing providers.gcp
    :param name: module name; doubles as the image name and Dockerfile dir name
    :param image_root: directory holding one Dockerfile directory per image
    :param push: when True, push the image after the build step
    :param force: force creation of image if it doesn't exist in repo
    :param tag: docker tag to look for and to build with
    :return:
    """
    dockerfile_dir = f'{image_root}/{name}'
    image_name = f'{name}'
    gcp_conf = conf.providers.gcp

    grep_cmd = f'docker images | grep {tag} | grep {image_name};'
    trace = async_cmd(grep_cmd)
    logging.info(f"{name} image_trace: {trace}")

    # An empty trace means no local image matched the tag+name greps.
    should_build = force or not trace
    if should_build:
        logging.info(f"attempting to create image {name}")
        build_script = (
            f'docker build -t {image_name}:{tag} {dockerfile_dir};'
            f'echo "These are the resulting images:";'
            f'docker images | grep {tag} | grep {image_name};'
        )
        os.system(build_script)

    if push:
        push_image(gcp_conf)
def delete_pvc(pvc_type, namespace='default'):
    """
    Deletes persistent volume claims.
    :param pvc_type: claim name or prefix for storage claim automatic creations or stateful sets
    :param namespace: kubernetes namespace to search for claims
    """
    # Each output line of `kubectl get pvc` starts with the claim name.
    for line in async_cmd(f'kubectl get pvc -n {namespace}'):
        claim = line.split(' ')[0]
        logging.info(claim)

        if f'{pvc_type}' not in line:
            continue

        cmd = f"kubectl delete pvc {claim} -n {namespace} --wait=false"
        logging.info(f'cmd: {cmd}')
        response = async_cmd(cmd)
        logging.info(f'response: {response}')
def replace_dir_contents(origin_path, origin_regex, destination_path, destination_dir_name='artifacts'):
    """
    Used to update executable code for docker image packing e.g. interpreted code and artifacts.
    Validates existence of directory.
    Validates existence of a file regex in the directory for sanity purposes.
    Removes old destination.
    Copies directory contents to destination under artifacts directory.

    :param origin_path: path to directory containing executables to be packed into image
    :param origin_regex: Origin file regex e.g. App*.zip (AppV1.zip, AppV2.zip ...)
        to be found in path for sanity check
    :param destination_path: A destination directory where the content of the
        executable directory can be copied to
    :param destination_dir_name: the name of the destination directory defaults to: artifacts
    :return: path of the matched origin file, or None when the sanity check fails
    """
    find_results = async_cmd(f"find {origin_path} -name {origin_regex}")

    # BUG FIX: indexing [0] on an empty `find` result raised IndexError
    # before the sanity check below could report the missing file.
    # The [:-1] strips the trailing newline from the command output line.
    target_file = find_results[0][:-1] if find_results else ''

    logging.info(
        f'Search results for regex <{origin_regex}> in origin_path: {origin_path}:\n'
        f'{target_file}')

    if target_file == '':
        logging.error(
            f"couldn't find {origin_path}/{origin_regex}\nPlease make sure it exists in path"
        )
        return

    logging.info(f"Found {target_file} in target")

    full_destination = f"{destination_path}/{destination_dir_name}"

    # Best-effort removal: destination may not exist on first run.
    try:
        rmtree(full_destination)
    except Exception as e:
        logging.error(str(e))

    copytree(origin_path, full_destination)
    logging.info(f"successfully replaced {full_destination}")

    return target_file
def delete_pv(pv_type, namespace='default'):
    """
    Deletes persistent volumes even if they are protected, provided their claims are deleted.
    :param pv_type: volume name or prefix for pv automatic creations or stateful sets
    :param namespace: namespace whose claims bind the volumes to delete
    """
    # Patch payload that clears finalizers so protected volumes can be removed.
    escaped = """'{"metadata":{"finalizers": []}}'"""

    for line in async_cmd('kubectl get pv'):
        volume = line.split(' ')[0]
        logging.info(f'{volume}')

        if f'{namespace}/{pv_type}' not in line:
            continue

        # First strip finalizers, then delete without waiting.
        patch_cmd = f"""kubectl patch persistentvolume/{volume} -p {escaped} --type=merge"""
        logging.info(f'cmd: {patch_cmd}')
        response = async_cmd(patch_cmd)
        logging.info(f'response: {response}')

        delete_cmd = f"kubectl delete persistentvolume/{volume} --wait=false"
        logging.info(f'cmd: {delete_cmd}')
        response = async_cmd(delete_cmd)
        logging.info(f'response: {response}')
def installations(installations, action):
    """
    Applies, deletes or plans the helm-chart installations listed in the config.

    :param installations: iterable config object whose `helm` attribute lists
        chart names (parameter name kept for caller compatibility even though
        it shadows the function name)
    :param action: a WieldAction member (APPLY / DELETE / PLAN)
    """
    # BUG FIX: removed the bare no-op expression statement `installations.helm`
    # and replaced the side-effect list comprehension with a plain loop.
    for installation in installations:
        print(installation)

    if 'kafka' in installations.helm:
        if action is WieldAction.APPLY:
            async_cmd('kubectl create ns kafka')
            async_cmd('helm install --name wielder-kafka --namespace kafka incubator/kafka')
        elif action is WieldAction.DELETE:
            async_cmd('helm delete --purge wielder-kafka')
        elif action is WieldAction.PLAN:
            print("plan in place to install kafka helm chart")
def init_observe_service(svc_tuple):
    """
    Creates a kubernetes service from a manifest file and polls until it is
    assigned an IP, or until ~400 seconds have elapsed.

    :param svc_tuple: (service_name, service_file_path[, namespace]);
        namespace defaults to "default" when omitted
    :return: (svc_name, svc, ip) on success;
        on timeout a (message, svc_name, svc) tuple (NOTE: different element
        order, kept for backward compatibility with existing callers);
        (svc_name, svc, exception) if the wait loop itself fails
    """
    svc_name = svc_tuple[0]
    svc_file_path = svc_tuple[1]

    # Optional third element selects the namespace.
    svc_namespace = svc_tuple[2] if len(svc_tuple) > 2 else "default"

    interval = 5

    result = async_cmd(f"kubectl create -f {svc_file_path}")
    logging.info(f"result: {result}")

    time_waiting = 0
    ip = None
    svc = None

    while ip is None:
        svc = get_service(svc_name, namespace=svc_namespace)

        if time_waiting > 400:
            # BUG FIX: removed the unreachable `break` that followed this
            # return, and fixed the "that'sn" typo in the log message.
            logging.info(f"waited {time_waiting} that's enough exiting")
            return "timeout either provisioning might be too long or some code problem", svc_name, svc

        ip = get_svc_ip(svc)

        if ip is None:
            try:
                print(f"\n\nWaited {time_waiting} for {svc_name} going to sleep for {interval}")
                time.sleep(interval)
                time_waiting += interval
            except Exception as e:
                return svc_name, svc, e

    return svc_name, svc, ip
def sanity(conf):
    """
    Validates that the configured kube context, deploy environment and the
    actual kubectl context agree; exits the process with status 1 on any
    discrepancy.

    :param conf: configuration object with kube_context, deploy_env and
        supported_deploy_envs fields
    """
    # Strip the trailing newline from the kubectl output line.
    context = async_cmd('kubectl config current-context')[0][:-1]

    if conf.kube_context == context:
        logging.info(f"kubernetes current context: {context}")
    else:
        logging.error(
            f"There is a discrepancy between the configured and actual contexts:"
            f"\nkube context : {conf.kube_context}"
            f"\ncurrent context: {context} "
            f"\neither add context in command-line args or in config file or"
            f"\nto change context run:"
            f"\nkubectl config use-context <the context you meant>"
            f"\n!!! Exiting ...")
        exit(1)

    # Local deployments must run against one of the known local contexts.
    if conf.deploy_env == 'local' and conf.kube_context not in LOCAL_CONTEXTS:
        logging.error(
            f"There is a discrepancy between deploy_env: {conf.deploy_env} "
            f"and kube_context: {conf.kube_context}.\n"
            f"If you meant to one of these:\n{LOCAL_CONTEXTS} run:\n"
            f"kubectl config use-context <some local-context>\n"
            f"!!! Exiting ...")
        exit(1)

    logging.info(f"conf.supported_deploy_envs: {conf.supported_deploy_envs}")

    if conf.deploy_env not in conf.supported_deploy_envs:
        logging.error(f"We do not support deploy_env: {conf.deploy_env}!!!\n"
                      f"If you want to support it add it in:\n"
                      f"conf file in supported_deploy_envs field\n"
                      f"!!! Exiting ...")
        exit(1)
def replace_file(origin_path, origin_regex, destination_path, final_name):
    """
    Validates existence of origin_regex.
    Removes old destination.
    Copies target with final_name to destination.

    :param origin_path: directory searched (recursively via `find`) for the file
    :param origin_regex: Origin file regex e.g. App*.zip (AppV1.zip, AppV2.zip ...)
    :param destination_path: directory the file is copied into
    :param final_name: name the copied file receives at the destination
    :return:
    """
    find_results = async_cmd(f"find {origin_path} -name {origin_regex}")

    # BUG FIX: indexing [0] on an empty `find` result raised IndexError
    # before the sanity check below. [:-1] strips the trailing newline.
    target_file = find_results[0][:-1] if find_results else ''

    # BUG FIX: log message typos corrected ('"" ' -> '"', 'fount' -> 'found').
    logging.info(
        f'Regex "{origin_regex}" is found in origin_path: {target_file}')

    if target_file == '':
        logging.warning(
            f"couldn't find {origin_path}/{origin_regex} please run: ")
        return

    logging.info(f"Found {target_file} in target")

    full_destination = f"{destination_path}/{final_name}"

    # Best-effort removal: destination may not exist on first run.
    try:
        os.remove(full_destination)
    except Exception as e:
        logging.error(str(e))

    # BUG FIX: original called copytree on a single file, which raises
    # NotADirectoryError — `find -name` yields a file path here, so copy
    # it with copyfile (already imported by this module).
    copyfile(target_file, full_destination)
    logging.info(f"successfully replaced {full_destination}")
def get_kube_context():
    """Return the current kubectl context name, trailing newline stripped."""
    return async_cmd('kubectl config current-context')[0][:-1]
def pep_image(force_last=True, push=False):
    """
    Builds the docker image chain for the 'pep' service: the 'perl' and
    'perl_py' base images, then the service image itself, after staging
    executable artifacts into the image directory.

    :param force_last: force rebuilding the final service image
    :param push: push the final service image to the gcp repository when True
    """
    locale = get_locale(__file__)

    # Resolve run-mode variables from command-line args (None means "use default").
    action, mode, enable_debug, local_mount, service_mode = replace_none_vars_from_args(
        action=None,
        mode=None,
        enable_debug=None,
        local_mount=None,
        service_mode=None,
        project_override=None
    )

    service = WieldService(
        name='pep',
        locale=locale,
        mode=mode,
        service_mode=service_mode,
    )

    # Packaging plan drives image name, tag and artifact staging strategy.
    plan = service.plan.module_conf.packaging
    image_name = plan.image_name
    tag = plan.git.branch

    project_root = u.get_project_root()
    conf = u.get_conf_context_project(project_root=project_root)
    image_root = u.get_project_image_root()

    # Base images are always rebuilt (force=True) and never pushed here.
    pack_image(
        conf=conf,
        name='perl',
        image_root=image_root,
        push=False,
        force=True,
        tag=tag
    )

    pack_image(
        conf=conf,
        name='perl_py',
        image_root=image_root,
        push=False,
        force=True,
        tag=tag
    )

    # Fall back to the conventional artifact location when the plan
    # does not configure origin_path / origin_regex.
    try:
        origin_path = plan.origin_path
        origin_regex = plan.origin_regex
    except AttributeError:
        super_project_root = u.get_super_project_root()
        origin_path = f'{super_project_root}/micros/perl/pep'
        origin_regex = 'pep.pl'

    module_root = u.get_module_root(__file__)
    # NOTE(review): no '/' between module_root and 'image' — presumably
    # get_module_root returns a path with a trailing slash; confirm.
    image_root = f'{module_root}image'
    destination_path = f'{image_root}/{image_name}'
    artifacts_dir = f'{destination_path}/artifacts'

    artifact_method = plan.artifact_method

    # NOTE(review): branch nesting reconstructed from flattened source —
    # confirm the else belongs to the inner GET_DIR check as written here.
    if artifact_method == ArtifactMethod.GET_DIR.value or artifact_method == ArtifactMethod.INIT_REPO.value:
        rmtree(artifacts_dir, ignore_errors=True)

        if artifact_method == ArtifactMethod.GET_DIR.value:
            # Copy the whole origin directory into the artifacts dir.
            replace_dir_contents(
                origin_path,
                origin_regex,
                destination_path=destination_path,
                destination_dir_name='artifacts'
            )
        else:
            # INIT_REPO: start from a fresh clone/update of the origin repo.
            clone_or_update(source=origin_path, destination=artifacts_dir, branch=plan.git.branch)

    if artifact_method == ArtifactMethod.INIT_REPO.value:
        # Overlay individually configured (subdir, filename) artifacts.
        for art in plan.artifacts:
            os.makedirs(f"{artifacts_dir}/{art[0]}", exist_ok=True)
            try:
                copyfile(src=f"{origin_path}/{art[0]}/{art[1]}", dst=f"{artifacts_dir}/{art[0]}/{art[1]}")
            except Exception as e:
                logging.error(str(e))

    # Run the staged prepare script and echo its output.
    _cmd = f'{artifacts_dir}/pypep/prepare.bash'
    a = async_cmd(_cmd)

    for b in a:
        print(b)

    # Build the final service image; push is handled separately below.
    pack_image(
        conf,
        name=image_name,
        image_root=image_root,
        push=False,
        force=force_last,
        tag=tag
    )

    gcp_conf = conf.providers.gcp

    if push:
        push_image(gcp_conf, name=image_name, group='wielder', tag=tag)