class TestRunnerInMemory:
    """Runs a list of shell commands sequentially, recording per-command and
    overall status/timing into the module-level ``test_info_init`` dict.

    NOTE(review): ``run_commands`` aliases and mutates the global
    ``test_info_init`` rather than copying it — any other code holding that
    dict (e.g. the test-start endpoint) observes the mutations. Confirm this
    sharing is intentional.
    """

    def __init__(self):
        # Helper used to spawn each command through a shell.
        self.__cmd_utils = CmdUtils()

    def run_commands(self, commands):
        """Execute every command in order and return the populated state dict.

        Each entry under ``commands`` records status ("scheduled" ->
        "in progress" -> "finished"), start/finish timestamps, duration in
        whole seconds (rounded), and the command's output (or an
        "Exception(...)" string if it raised).
        """
        start_total = datetime.datetime.now()
        status_finished = "finished"
        status_in_progress = "in progress"
        # Aliases the module-level dict; mutations below are globally visible.
        command_dict = test_info_init
        command_dict['pid'] = os.getpid()
        # NOTE: dict.fromkeys shares ONE placeholder dict across all keys;
        # harmless here because the loop replaces each entry with a fresh dict.
        input_data_dict = dict.fromkeys(commands, {
            "status": "scheduled",
            "details": {}
        })
        # Legacy format: started/finished are the strings "true"/"false",
        # not booleans (unlike CommandInMemory elsewhere in this file).
        command_dict["started"] = "true"
        command_dict["commands"] = input_data_dict
        command_dict["startedat"] = str(datetime.datetime.now())
        details = {}
        for command in commands:
            start = datetime.datetime.now()
            # Replace the shared placeholder with a per-command dict.
            command_dict['commands'][command.strip()] = {
                "status": "scheduled",
                "details": {}
            }
            command_dict['commands'][
                command.strip()]['status'] = status_in_progress
            command_dict['commands'][command.strip()]['startedat'] = str(start)
            try:
                if platform.system() == "Windows":
                    # Windows: naive whitespace split into argv.
                    details[
                        command.strip()] = self.__cmd_utils.run_cmd_shell_true(
                            command.split())
                else:
                    # POSIX: pass the whole command line as a single element.
                    details[
                        command.strip()] = self.__cmd_utils.run_cmd_shell_true(
                            [command.strip()])
            except Exception as e:
                # A failed command is recorded, not raised; the run continues.
                details[command.strip()] = "Exception({0})".format(e.__str__())
            command_dict['commands'][
                command.strip()]['status'] = status_finished
            end = datetime.datetime.now()
            command_dict['commands'][command.strip()]['finishedat'] = str(end)
            # Duration is rounded to whole seconds.
            command_dict['commands'][command.strip()]['duration'] = round(
                (end - start).total_seconds())
            command_dict['commands'][command.strip()]['details'] = details[
                command.strip()]
        command_dict['finished'] = "true"
        command_dict['started'] = "false"
        end_total = datetime.datetime.now()
        command_dict['finishedat'] = str(end_total)
        command_dict['duration'] = round(
            (end_total - start_total).total_seconds())
        return command_dict
def __init__(self):
    """Initialize the in-memory run-state document and the command helper."""
    # Two separate timestamp captures, matching the original call order.
    initial_started = str(datetime.datetime.now())
    initial_finished = str(datetime.datetime.now())
    # Baseline document; fields are overwritten when a run actually starts.
    self.command_dict = {
        "finished": False,
        "started": False,
        "startedat": initial_started,
        "finishedat": initial_finished,
        "duration": 0.000000,
        "id": "none",
        "pid": 0,
        "commands": {}
    }
    self.__cmd_utils = CmdUtils()
def up(file):
    """Apply a Kubernetes manifest with ``kubectl apply``.

    Raises FileNotFoundError (ENOENT) when ``file`` does not exist; otherwise
    returns the CmdUtils result dict of the kubectl invocation.
    """
    file_path = Path(file)
    if not file_path.is_file():
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                file_path)
    apply_cmd = ["kubectl", "apply", "-f", file, "--insecure-skip-tls-verify"]
    return CmdUtils.run_cmd_shell_false(apply_cmd)
def logs(file):
    """Fetch timestamped docker-compose logs (last 5000 lines per service).

    Raises OSError (ENOENT) when the compose file does not exist; otherwise
    returns the CmdUtils result dict.
    """
    docker_logs_lines = 5000
    compose_path = Path(file)
    if not compose_path.is_file():
        raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), compose_path)
    logs_cmd = ["docker-compose", "-f", file, "logs", "-t",
                "--tail=" + str(docker_logs_lines)]
    return CmdUtils.run_cmd_shell_false(logs_cmd)
def test_start(test_id):
    """HTTP handler: start a test run identified by ``test_id``.

    Reads newline-separated commands from the request body, filters out any
    line matching an ``rm`` pattern, records the test id in the global
    ``test_info_init`` (persisted to TEST_INFO_PATH), then launches start.py
    detached with the test id and commands as arguments.

    NOTE(review): error responses use HTTP 404 throughout — confirm that is
    the intended status code for validation/launch failures.
    """
    test_id = test_id.strip()
    variables = "testinfo.json"
    start_py_path = str(Path(".").absolute()) + "/start.py"
    # Environment consumed by the detached start.py process.
    os.environ['TEMPLATE'] = "start.py"
    os.environ['VARIABLES'] = variables
    io_utils = IOUtils()
    cmd_utils = CmdUtils()
    http = HttpResponse()
    input_data = request.data.decode("UTF-8", "replace").strip()
    if not input_data:
        # Empty body: nothing to run.
        return Response(json.dumps(http.failure(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        # Drop any command line containing an 'rm' invocation (basic guard
        # against destructive commands), then strip the survivors.
        input_data_list = io_utils.get_filtered_list_regex(input_data.split("\n"),
                                                           re.compile(r'(\s+|[^a-z]|^)rm\s+.*$'))
        input_data_list = list(map(lambda x: x.strip(), input_data_list))
        # Mutates the module-level dict and persists it for the runner.
        test_info_init["id"] = test_id
        io_utils.write_to_file_dict(EnvConstants.TEST_INFO_PATH, test_info_init)
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_START_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        # Make start.py executable, then run it detached as:
        #   start.py <test_id> <command> [<command> ...]
        os.chmod(start_py_path, stat.S_IRWXU)
        input_data_list.insert(0, test_id)
        input_data_list.insert(0, start_py_path)
        # input_data_list.insert(0, "python")
        cmd_utils.run_cmd_detached(input_data_list)
    except Exception as e:
        result = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_START_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                                                result,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return Response(
        json.dumps(http.success(ApiCodeConstants.SUCCESS,
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
                                test_id)), 200, mimetype="application/json")
def container_docker_network_connect(self, env_id):
    """HTTP handler: attach an environment's container to the deployer network.

    The target container is ``<env_id>_<service>_1`` (docker-compose v1
    naming); the service defaults to "container" and can be overridden via
    the ``service`` query parameter. The network defaults to the first
    network whose name matches "deployer", overridable via the
    ``Docker-Network`` request header.
    """
    env_id = env_id.lower()
    http = HttpResponse()
    docker_utils = DockerUtils()
    headers = request.headers
    service_name = "container"
    if request.args.get('service') is not None:
        service_name = request.args.get('service')
    container_id = f"{env_id}_{service_name}_1"
    try:
        # Find the deployer's own network; empty stdout means lookup failed.
        status = CmdUtils.run_cmd_shell_false([
            "docker", "network", "ls", "--filter", "name={}".format("deployer")
        ])
        app.logger.debug({"msg": status})
        if not status.get('out'):
            raise Exception(status.get('err'))
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.GET_DEPLOYER_NETWORK_FAILED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.GET_DEPLOYER_NETWORK_FAILED.value), e)
    try:
        # First data row of `docker network ls` output, first column: the
        # network id (row 0 is the header).
        deployer_network = status.get('out').split("\n")[1].split(
            " ")[0].strip()
        if headers.get("Docker-Network"):
            deployer_network = headers.get("Docker-Network")
        status = docker_utils.network_connect(deployer_network, container_id)
        # Docker reports an already-connected container on stderr; treat it
        # as success rather than a failure.
        if "already exists in network".lower() in status.get(
                'err').lower():
            return Response(json.dumps(
                http.response(
                    ApiCode.SUCCESS.value,
                    ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                    "Success, already connected: " + status.get('err'))),
                200, mimetype="application/json")
        # Any other daemon error is a real failure.
        if "Error response from daemon".lower() in status.get(
                'err').lower():
            raise Exception(status.get('err'))
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.CONTAINER_NET_CONNECT_FAILED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.CONTAINER_NET_CONNECT_FAILED.value), e)
    return Response(json.dumps(
        http.response(ApiCode.SUCCESS.value,
                      ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                      status.get('out'))), 200, mimetype="application/json")
def get_active_pods(label_selector, namespace):
    """List pods in ``namespace`` matching ``label_selector``.

    Runs ``kubectl get pods`` and wraps every data row as an
    ActiveDeployment.k8s_pod(namespace, pod_name, normalized_row).
    Returns an empty list when no pods match.
    """
    active_pods = []
    status = CmdUtils.run_cmd_shell_false([
        "kubectl", "get", "pods", "-n", namespace, "-l", label_selector,
        "--insecure-skip-tls-verify"
    ])
    # Skip the header row; normalize each row's whitespace to single spaces.
    for raw_line in status.get('out').split('\n')[1:]:
        line = ' '.join(raw_line.split())
        # BUG FIX: kubectl output ends with a trailing newline, so the last
        # entry is empty — ''.split()[0] raised IndexError. Skip blank rows.
        if not line:
            continue
        active_pods.append(
            ActiveDeployment.k8s_pod(namespace, line.split()[0], line))
    return active_pods
def get_active_deployments():
    """List Kubernetes deployments across all namespaces.

    Runs ``kubectl get deployments --all-namespaces`` and wraps every data
    row as ActiveDeployment.k8s_deployment(namespace, name, normalized_row).
    Returns an empty list when there are no deployments.
    """
    active_deployments = []
    status = CmdUtils.run_cmd_shell_false([
        "kubectl", "get", "deployments", "--all-namespaces",
        "--insecure-skip-tls-verify"
    ])
    # Skip the header row; normalize each row's whitespace to single spaces.
    for raw_line in status.get('out').split('\n')[1:]:
        line = ' '.join(raw_line.split())
        # BUG FIX: kubectl output ends with a trailing newline, so the last
        # entry is empty — ''.split()[0] raised IndexError. Skip blank rows.
        if not line:
            continue
        columns = line.split()
        active_deployments.append(
            ActiveDeployment.k8s_deployment(columns[0], columns[1], line))
    return active_deployments
def start_deployment_with_templates(self, template, variables):
    """HTTP handler: render a docker-compose template + variables file and
    start it detached under a new (or caller-supplied) deployment id.

    Deployment id comes from the ``Deployment-Id`` header (lower-cased) or a
    fresh random token. Request-body JSON keys not already present in the
    environment are injected as virtual env vars before rendering.
    Raises ApiExceptionDocker when the docker daemon is unreachable, the
    max-deployments limit is reached, or the render/start step fails.
    """
    http = HttpResponse()
    docker_utils = DockerUtils()
    token = token_hex(8)
    deployment_id = request.headers.get("Deployment-Id").lower(
    ) if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    try:
        # Best-effort: a non-JSON body is logged and ignored.
        input_json = request.get_json(force=True)
        for key, value in input_json.items():
            if key not in EnvironmentSingleton.get_instance().get_env():
                EnvironmentSingleton.get_instance().set_env_var(
                    str(key), str(value))
    except Exception as e:
        app.logger.debug(
            f"Could not parse the input from the request as JSON: {e.__str__()}"
        )
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.TEMPLATE, template.strip())
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.VARIABLES, variables.strip())
    env_vars = EnvironmentSingleton.get_instance().get_env_and_virtual_env()
    app.logger.debug(
        {"msg": {
            "template_file": env_vars.get(EnvConstants.TEMPLATE)
        }})
    app.logger.debug(
        {"msg": {
            "variables_file": env_vars.get(EnvConstants.VARIABLES)
        }})
    # `docker ps` doubles as a daemon liveness probe.
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get(
            'err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.DOCKER_DAEMON_NOT_RUNNING.value), status.get('err'))
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(
            EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.MAX_DEPLOYMENTS_REACHED.value) %
            str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        # Render the compose file, write it to the deployment dir, then pull
        # and start it detached (fire-and-forget).
        r = Render(env_vars.get(EnvConstants.TEMPLATE),
                   env_vars.get(EnvConstants.VARIABLES))
        IOUtils.create_dir(deploy_dir)
        IOUtils.write_to_file(file, r.rend_template())
        CmdUtils.run_cmd_detached(
            rf'''docker-compose -f {file} pull && 
docker-compose -f {file} up -d''')
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    # Garbage-collect metadata for deployments that no longer exist, then
    # persist this deployment's metadata alongside its compose file.
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(
        IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(
        deployment_id, metadata)
    return Response(json.dumps(
        http.response(ApiCode.SUCCESS.value,
                      ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                      deployment_id)), 200, mimetype="application/json")
def start_deployment(self):
    """HTTP handler: start a deployment from a raw compose template in the
    request body.

    Deployment id comes from the ``Deployment-Id`` header (lower-cased) or a
    fresh random token. If the configured environment provides both
    EUREKA_SERVER and APP_IP_PORT and the template references
    ``{{app_ip_port}}`` and ``{{eureka_server}}``, the template is rendered
    with those values (the ``Eureka-Server`` header overrides the configured
    eureka server). On any start failure the partial deployment is torn
    down and ApiExceptionDocker is raised.
    """
    docker_utils = DockerUtils()
    token = token_hex(8)
    deployment_id = request.headers.get("Deployment-Id").lower(
    ) if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    header_key = 'Eureka-Server'
    eureka_server_header = request.headers.get(f"{header_key}")
    config_env_vars = EnvStartupSingleton.get_instance(
    ).get_config_env_vars()
    input_data = request.data.decode('UTF-8').strip()
    # `docker ps` doubles as a daemon liveness probe.
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get(
            'err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.DOCKER_DAEMON_NOT_RUNNING.value), status.get('err'))
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(
            EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.MAX_DEPLOYMENTS_REACHED.value) %
            str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        # Persist the raw template so jinja2 can load it by name.
        template_file_name = f"deployment_{deployment_id}.yml"
        template_file_path = f"{EnvInit.init.get(EnvConstants.TEMPLATES_DIR)}/{template_file_name}"
        app.logger.debug({
            "msg": {
                "file": template_file_path,
                "file_content": f"{input_data}"
            }
        })
        IOUtils.write_to_file(template_file_path, input_data)
        IOUtils.create_dir(deploy_dir)
        EnvironmentSingleton.get_instance().set_env_var(
            EnvConstants.TEMPLATE, template_file_name)
        env_vars = EnvironmentSingleton.get_instance(
        ).get_env_and_virtual_env()
        render = Render(env_vars.get(EnvConstants.TEMPLATE),
                        env_vars.get(EnvConstants.VARIABLES))
        if config_env_vars.get(
                EnvConstants.EUREKA_SERVER) and config_env_vars.get(
                    EnvConstants.APP_IP_PORT):
            # if {{app_ip_port}} and {{eureka_server}} then register that instance too
            if '{{app_ip_port}}' in input_data and '{{eureka_server}}' in input_data:
                eureka_server = config_env_vars.get(
                    EnvConstants.EUREKA_SERVER)
                # header value overwrite the eureka server
                if eureka_server_header:
                    eureka_server = eureka_server_header
                input_data = render.get_jinja2env().get_template(
                    env_vars.get(EnvConstants.TEMPLATE)).render({
                        "deployment_id": f"{deployment_id}",
                        "eureka_server": eureka_server,
                        "app_ip_port":
                        config_env_vars.get(
                            EnvConstants.APP_IP_PORT).split("/")[0]
                    })
        # Template file is transient; remove it once rendering is done.
        os.remove(template_file_path) if os.path.exists(
            template_file_path) else None
        app.logger.debug(
            {"msg": {
                "file": file,
                "file_content": f"{input_data}"
            }})
        IOUtils.write_to_file(file, input_data) if input_data else None
        # Pull then start detached (fire-and-forget).
        CmdUtils.run_cmd_detached(
            rf'''docker-compose -f {file} pull && docker-compose -f {file} up -d''')
    except Exception as e:
        # Best-effort teardown of whatever got started before failing.
        app.logger.debug({"msg": docker_utils.down(file)})
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    # Garbage-collect metadata for deployments that no longer exist, then
    # persist this deployment's metadata alongside its compose file.
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(
        IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(
        deployment_id, metadata)
    return Response(json.dumps(HttpResponse().response(
        ApiCode.SUCCESS.value,
        ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
        deployment_id)), 200, mimetype="application/json")
def exec_detached(container_id, command):
    """Run ``command`` (a list of argv tokens) inside a container, detached.

    Returns the CmdUtils result dict of ``docker exec -d``.
    """
    return CmdUtils.run_cmd_shell_false(
        ["docker", "exec", "-d", f"{container_id}", *command])
def volume_prune():
    """Remove all unused Docker volumes without prompting."""
    prune_cmd = ["docker", "volume", "prune", "-f"]
    return CmdUtils.run_cmd_shell_false(prune_cmd)
def network_disconnect(deployer_net, container):
    """Detach ``container`` from the Docker network ``deployer_net``."""
    disconnect_cmd = ["docker", "network", "disconnect",
                      f"{deployer_net}", f"{container}"]
    return CmdUtils.run_cmd_shell_false(disconnect_cmd)
def network_prune():
    """Remove all unused Docker networks without prompting."""
    return CmdUtils.run_cmd_shell_false(["docker", "network", "prune", "-f"])
def ps(env_id):
    """List running containers whose name matches the (lower-cased) env id."""
    normalized_id = env_id.lower()
    return CmdUtils.run_cmd_shell_false(
        ["docker", "ps", "--filter", f"name={normalized_id}"])
def down(deployment, namespace):
    """Delete a Kubernetes deployment in the given namespace."""
    delete_cmd = ["kubectl", "-n", namespace, "delete", "deployment",
                  deployment, "--insecure-skip-tls-verify"]
    return CmdUtils.run_cmd_shell_false(delete_cmd)
def logs(pod, namespace):
    """Fetch the logs of ``pod`` in ``namespace`` via kubectl."""
    logs_cmd = ["kubectl", "-n", namespace, "logs", pod,
                "--insecure-skip-tls-verify"]
    return CmdUtils.run_cmd_shell_false(logs_cmd)
class CommandInMemory:
    """Runs a list of shell commands sequentially, tracking per-command and
    overall status/timing in an in-memory dict."""

    def __init__(self):
        """Initialize the baseline run-state document and the command helper."""
        self.command_dict = {
            "finished": False,
            "started": False,
            "startedat": str(datetime.datetime.now()),
            "finishedat": str(datetime.datetime.now()),
            "duration": 0.000000,
            "id": "none",
            "pid": 0,
            "commands": {}
        }
        self.__cmd_utils = CmdUtils()

    def run_commands(self, commands):
        """Execute every command in order and return the populated state dict."""
        overall_start = datetime.datetime.now()
        stripped = [item.strip() for item in commands]
        self.command_dict['pid'] = os.getpid()
        # dict.fromkeys shares one placeholder dict across keys; each entry
        # is replaced with a fresh dict inside __execute before mutation.
        self.command_dict["commands"] = dict.fromkeys(stripped, {
            "status": "scheduled",
            "details": {}
        })
        self.command_dict["started"] = True
        self.command_dict["startedat"] = str(datetime.datetime.now())
        self.__execute(stripped)
        overall_end = datetime.datetime.now()
        self.command_dict['finished'] = True
        self.command_dict['started'] = False
        self.command_dict['finishedat'] = str(overall_end)
        self.command_dict['duration'] = (overall_end -
                                         overall_start).total_seconds()
        return self.command_dict

    def __execute(self, commands):
        """Run each command, recording status transitions, timestamps,
        duration (fractional seconds) and output per command."""
        outcomes = {}
        for cmd in commands:
            entry = {"status": "scheduled", "details": {}}
            self.command_dict['commands'][cmd] = entry
            begun = datetime.datetime.now()
            entry['status'] = "in progress"
            entry['startedat'] = str(begun)
            try:
                if platform.system() == "Windows":
                    # Windows: proper shell-style tokenization.
                    outcomes[cmd] = self.__cmd_utils.run_cmd_shell_true(
                        shlex.split(cmd))
                else:
                    # POSIX: the whole command line as a single element.
                    outcomes[cmd] = self.__cmd_utils.run_cmd_shell_true([cmd])
            except Exception as e:
                # A failed command is recorded, not raised; the run continues.
                outcomes[cmd] = "Exception({0})".format(e.__str__())
            entry['status'] = "finished"
            done = datetime.datetime.now()
            entry['finishedat'] = str(done)
            entry['duration'] = (done - begun).total_seconds()
            entry['details'] = outcomes[cmd]
def __init__(self):
    """Create the shell-command helper used by this instance."""
    self.__cmd_utils = CmdUtils()
def stop(file):
    """Stop the services of a docker-compose file.

    Raises OSError (ENOENT) when ``file`` does not exist; otherwise returns
    the CmdUtils result dict of ``docker-compose stop``.
    """
    compose_path = Path(file)
    if not compose_path.is_file():
        raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), compose_path)
    return CmdUtils.run_cmd_shell_false(
        ["docker-compose", "-f", file, "stop"])
class TestRunnerParallel:
    """Runs test commands in parallel OS processes, persisting per-command
    progress into a JSON state file via a multiprocessing Manager dict."""

    def __init__(self):
        self.__cmd_utils = CmdUtils()
        self.__io_utils = IOUtils()

    def run_command(self, manager_dict, dictionary, command):
        """Execute one command (runs in a child process).

        Updates the local ``dictionary`` bookkeeping for ``command`` and
        publishes the finished entry into the shared ``manager_dict``.
        """
        print("Input json is: " + json.dumps(dictionary) + "\n")
        status_finished = "finished"
        status_in_progress = "in progress"
        key = command.strip()
        dictionary['commands'][key]['status'] = status_in_progress
        start = datetime.datetime.now()
        dictionary['commands'][key]['startedat'] = str(start)
        if platform.system() == "Windows":
            # Windows: naive whitespace split into argv.
            details = self.__cmd_utils.run_cmd_shell_true(command.split())
        else:
            # POSIX: the whole command line as a single element.
            details = self.__cmd_utils.run_cmd_shell_true([key])
        dictionary['commands'][key]['status'] = status_finished
        end = datetime.datetime.now()
        dictionary['commands'][key]['finishedat'] = str(end)
        # Duration rounded to whole seconds, matching the in-memory runners.
        dictionary['commands'][key]['duration'] = round(
            (end - start).total_seconds())
        dictionary['commands'][key]['details'] = details
        manager_dict[key] = dictionary['commands'][key]

    def run_commands(self, json_file, commands):
        """Run all commands in parallel and persist results to ``json_file``."""
        with Manager() as manager:
            manager_dict = manager.dict()
            command_dict = self.__io_utils.read_dict_from_file(json_file)
            command_dict['start_pid'] = os.getpid()
            start_total = datetime.datetime.now()
            procs = [
                Process(target=self.run_command,
                        args=(manager_dict, command_dict, command.strip()))
                for command in commands
            ]
            # BUG FIX: the old code did p.start(); p.join() inside one loop,
            # which waited for each process before starting the next and
            # silently serialized the "parallel" runner. Start them all,
            # then join them all.
            for p in procs:
                p.start()
            for p in procs:
                p.join()
            command_dict['commands'] = dict(manager_dict)
            # Intermediate persist (results only), then final persist with
            # the overall status/timing fields filled in.
            self.__io_utils.write_to_file_dict(json_file, command_dict)
            command_dict['finished'] = "true"
            command_dict['started'] = "false"
            end_total = datetime.datetime.now()
            command_dict['finishedat'] = str(end_total)
            command_dict['duration'] = round(
                (end_total - start_total).total_seconds())
            self.__io_utils.write_to_file_dict(json_file, command_dict)