def get_test_info():
    """Serve the contents of testinfo.json plus a live process snapshot.

    Seeds the file from ``test_info_init`` on first access. Returns a Flask
    JSON Response: 200 with the file contents on success, 404 with failure
    details otherwise.
    """
    responder = HttpResponse()
    file_helper = IOUtils()
    info_file = EnvConstants.VARIABLES_PATH + "/" + "testinfo.json"
    try:
        if not Path(info_file).is_file():
            # first access: seed the file with the initial template
            file_helper.write_to_file_dict(info_file, test_info_init)
        payload = json.loads(file_helper.read_file(info_file))
        # augment with the current process table
        payload["processes"] = [
            proc.info
            for proc in psutil.process_iter(attrs=['pid', 'name', 'username', 'status'])
        ]
    except Exception as err:
        exception = "Exception({0})".format(err.__str__())
        return Response(
            json.dumps(
                responder.failure(
                    ApiCodeConstants.GET_CONTAINER_TEST_INFO_FAILURE,
                    ErrorCodes.HTTP_CODE.get(ApiCodeConstants.GET_CONTAINER_TEST_INFO_FAILURE),
                    exception,
                    str(traceback.format_exc()))),
            404, mimetype="application/json")
    return Response(
        json.dumps(
            responder.success(ApiCodeConstants.SUCCESS,
                              ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
                              payload)),
        200, mimetype="application/json")
def test_start(test_id):
    """Schedule a detached test run identified by *test_id*.

    The request body carries one shell command per line; any line invoking
    ``rm`` is filtered out for safety. The surviving commands are handed to
    start.py, launched detached with the test id as its first argument.
    Returns a 200 JSON Response with the test id, or 404 with failure details.
    """
    test_id = test_id.strip()
    launcher_path = str(Path(".").absolute()) + "/start.py"
    os.environ['TEMPLATE'] = "start.py"
    os.environ['VARIABLES'] = "testinfo.json"
    io_utils = IOUtils()
    cmd_utils = CmdUtils()
    http = HttpResponse()

    body = request.data.decode("UTF-8", "replace").strip()
    if not body:
        # guard: reject empty request bodies outright
        return Response(
            json.dumps(
                http.failure(
                    ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED,
                    ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                    ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                    str(traceback.format_exc()))),
            404, mimetype="application/json")

    try:
        # drop lines containing an `rm` command, then normalise whitespace
        commands = io_utils.get_filtered_list_regex(
            body.split("\n"), re.compile(r'(\s+|[^a-z]|^)rm\s+.*$'))
        commands = [entry.strip() for entry in commands]
        test_info_init["id"] = test_id
        io_utils.write_to_file_dict(EnvConstants.TEST_INFO_PATH, test_info_init)
    except Exception as err:
        exception = "Exception({0})".format(err.__str__())
        return Response(
            json.dumps(
                http.failure(
                    ApiCodeConstants.TEST_START_FAILURE,
                    ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                    exception,
                    str(traceback.format_exc()))),
            404, mimetype="application/json")

    try:
        # make the launcher executable, prepend [launcher, test_id], run detached
        os.chmod(launcher_path, stat.S_IRWXU)
        commands.insert(0, test_id)
        commands.insert(0, launcher_path)
        cmd_utils.run_cmd_detached(commands)
    except Exception as err:
        result = "Exception({0})".format(err.__str__())
        return Response(
            json.dumps(
                http.failure(
                    ApiCodeConstants.TEST_START_FAILURE,
                    ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                    result,
                    str(traceback.format_exc()))),
            404, mimetype="application/json")

    return Response(
        json.dumps(
            http.success(ApiCodeConstants.SUCCESS,
                         ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
                         test_id)),
        200, mimetype="application/json")
class TestRunner:
    """Runs a list of shell commands sequentially, persisting per-command
    status and timing into a JSON file after every step."""

    def __init__(self):
        self.__cmd_utils = CmdUtils()
        self.__io_utils = IOUtils()

    def run_commands(self, json_file, test_id, commands):
        """Execute *commands* one by one, writing progress to *json_file*.

        :param json_file: path of the JSON status file to (re)write
        :param test_id: identifier recorded under the 'id' key
        :param commands: iterable of shell command strings
        :return: the final status dict (also written to *json_file*)
        """
        start_total = datetime.datetime.now()
        status_finished = "finished"
        status_in_progress = "in progress"
        # NOTE: this aliases (and mutates) the module-level test_info_init
        # template, matching the original behavior relied on elsewhere.
        command_dict = test_info_init
        command_dict['id'] = test_id
        command_dict['pid'] = os.getpid()
        # BUG FIX: dict.fromkeys(commands, {...}) shared ONE mutable dict
        # across every key, so mutating any pending entry would bleed into
        # all of them; build an independent dict per command instead.
        input_data_dict = {cmd: {"status": "scheduled", "details": {}} for cmd in commands}
        command_dict["started"] = "true"
        command_dict["commands"] = input_data_dict
        command_dict["startedat"] = str(datetime.datetime.now())
        details = {}
        for command in commands:
            start = datetime.datetime.now()
            key = command.strip()
            # mark the command in progress and checkpoint the file before running
            command_dict['commands'][key] = {"status": "scheduled", "details": {}}
            command_dict['commands'][key]['status'] = status_in_progress
            command_dict['commands'][key]['startedat'] = str(start)
            self.__io_utils.write_to_file_dict(json_file, command_dict)
            try:
                if platform.system() == "Windows":
                    # Windows shell needs the command tokenised
                    details[key] = self.__cmd_utils.run_cmd_shell_true(command.split())
                else:
                    details[key] = self.__cmd_utils.run_cmd_shell_true([key])
            except Exception as e:
                # record the failure as the command's details; keep going
                details[key] = "Exception({0})".format(e.__str__())
            command_dict['commands'][key]['status'] = status_finished
            end = datetime.datetime.now()
            command_dict['commands'][key]['finishedat'] = str(end)
            command_dict['commands'][key]['duration'] = round((end - start).total_seconds())
            command_dict['commands'][key]['details'] = details[key]
            self.__io_utils.write_to_file_dict(json_file, command_dict)
        command_dict['finished'] = "true"
        command_dict['started'] = "false"
        end_total = datetime.datetime.now()
        command_dict['finishedat'] = str(end_total)
        command_dict['duration'] = round((end_total - start_total).total_seconds())
        self.__io_utils.write_to_file_dict(json_file, command_dict)
        return command_dict
def start_deployment_with_templates(self, template, variables):
    """Start a docker-compose deployment rendered from a template/variables pair.

    Request-body JSON keys not already present in the environment are promoted
    to env vars before rendering. Raises ApiExceptionDocker when the docker
    daemon is unreachable, the max-deployments cap is reached, or the compose
    start fails. Returns a 200 JSON Response carrying the deployment id.
    """
    http = HttpResponse()
    docker_utils = DockerUtils()
    token = token_hex(8)
    # deployment id: Deployment-Id header (lower-cased) or a fresh hex token
    deployment_id = request.headers.get("Deployment-Id").lower() if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    try:
        # merge request JSON into the environment; existing keys win
        input_json = request.get_json(force=True)
        for key, value in input_json.items():
            if key not in EnvironmentSingleton.get_instance().get_env():
                EnvironmentSingleton.get_instance().set_env_var(str(key), str(value))
    except Exception as e:
        # body was not JSON — best-effort merge only, not an error
        app.logger.debug(f"Could not parse the input from the request as JSON: {e.__str__()}")
    EnvironmentSingleton.get_instance().set_env_var(EnvConstants.TEMPLATE, template.strip())
    EnvironmentSingleton.get_instance().set_env_var(EnvConstants.VARIABLES, variables.strip())
    env_vars = EnvironmentSingleton.get_instance().get_env_and_virtual_env()
    app.logger.debug({"msg": {"template_file": env_vars.get(EnvConstants.TEMPLATE)}})
    app.logger.debug({"msg": {"variables_file": env_vars.get(EnvConstants.VARIABLES)}})
    # fail fast when the docker daemon is not reachable
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get('err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DOCKER_DAEMON_NOT_RUNNING.value),
            status.get('err'))
    # enforce the concurrent-deployments cap
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.MAX_DEPLOYMENTS_REACHED.value) % str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        # render the compose file, then pull images and bring the stack up detached
        r = Render(env_vars.get(EnvConstants.TEMPLATE), env_vars.get(EnvConstants.VARIABLES))
        IOUtils.create_dir(deploy_dir)
        IOUtils.write_to_file(file, r.rend_template())
        CmdUtils.run_cmd_detached(rf'''docker-compose -f {file} pull && 
docker-compose -f {file} up -d''')
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value), e)
    # drop metadata of deployments no longer running, then persist this one's
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(deployment_id, metadata)
    return Response(json.dumps(
        http.response(ApiCode.SUCCESS.value,
                      ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                      deployment_id)), 200, mimetype="application/json")
def start_deployment(self):
    """Start a docker-compose deployment from a compose template in the request body.

    The body is saved as a per-deployment template; when both eureka_server and
    app_ip_port are configured AND the template references {{app_ip_port}} and
    {{eureka_server}}, the template is re-rendered with Jinja2 so the deployed
    app self-registers with Eureka (an 'Eureka-Server' header overrides the
    configured server). On any failure the stack is torn down and
    ApiExceptionDocker is raised. Returns a 200 JSON Response with the
    deployment id.
    """
    docker_utils = DockerUtils()
    token = token_hex(8)
    # deployment id: Deployment-Id header (lower-cased) or a fresh hex token
    deployment_id = request.headers.get("Deployment-Id").lower() if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    header_key = 'Eureka-Server'
    eureka_server_header = request.headers.get(f"{header_key}")
    config_env_vars = EnvStartupSingleton.get_instance().get_config_env_vars()
    input_data = request.data.decode('UTF-8').strip()
    # fail fast when the docker daemon is not reachable
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get('err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DOCKER_DAEMON_NOT_RUNNING.value),
            status.get('err'))
    # enforce the concurrent-deployments cap
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.MAX_DEPLOYMENTS_REACHED.value) % str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        # persist the request body as this deployment's template file
        template_file_name = f"deployment_{deployment_id}.yml"
        template_file_path = f"{EnvInit.init.get(EnvConstants.TEMPLATES_DIR)}/{template_file_name}"
        app.logger.debug({"msg": {"file": template_file_path, "file_content": f"{input_data}"}})
        IOUtils.write_to_file(template_file_path, input_data)
        IOUtils.create_dir(deploy_dir)
        EnvironmentSingleton.get_instance().set_env_var(EnvConstants.TEMPLATE, template_file_name)
        env_vars = EnvironmentSingleton.get_instance().get_env_and_virtual_env()
        render = Render(env_vars.get(EnvConstants.TEMPLATE), env_vars.get(EnvConstants.VARIABLES))
        if config_env_vars.get(EnvConstants.EUREKA_SERVER) and config_env_vars.get(EnvConstants.APP_IP_PORT):
            # if {{app_ip_port}} and {{eureka_server}} then register that instance too
            if '{{app_ip_port}}' in input_data and '{{eureka_server}}' in input_data:
                eureka_server = config_env_vars.get(EnvConstants.EUREKA_SERVER)
                # header value overwrite the eureka server
                if eureka_server_header:
                    eureka_server = eureka_server_header
                input_data = render.get_jinja2env().get_template(env_vars.get(EnvConstants.TEMPLATE)).render({
                    "deployment_id": f"{deployment_id}",
                    "eureka_server": eureka_server,
                    "app_ip_port": config_env_vars.get(EnvConstants.APP_IP_PORT).split("/")[0]
                })
        # the temporary template file is no longer needed once rendered
        os.remove(template_file_path) if os.path.exists(template_file_path) else None
        app.logger.debug({"msg": {"file": file, "file_content": f"{input_data}"}})
        IOUtils.write_to_file(file, input_data) if input_data else None
        # pull images and bring the stack up detached
        CmdUtils.run_cmd_detached(rf'''docker-compose -f {file} pull && docker-compose -f {file} up -d''')
    except Exception as e:
        # best-effort teardown before surfacing the failure
        app.logger.debug({"msg": docker_utils.down(file)})
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value), e)
    # drop metadata of deployments no longer running, then persist this one's
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(deployment_id, metadata)
    return Response(json.dumps(HttpResponse().response(
        ApiCode.SUCCESS.value,
        ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
        deployment_id)), 200, mimetype="application/json")
# Service startup: prepare the testinfo.json template, log the environment,
# and launch the Flask app.
host = '0.0.0.0'
fluentd_tag = "startup"
variables = "testinfo.json"
message_dumper = MessageDumper()
io_utils = IOUtils()
# NOTE(review): `port` is only bound here when PORT is set; the "override"
# comment suggests a default is defined earlier in the file — verify, else
# app.run() below raises NameError when PORT is unset.
if os.environ.get('PORT'):
    port = int(os.environ.get("PORT"))  # override port if set from env
# ensure the template/variables directories exist before writing
io_utils.create_dir(Path(EnvConstants.TEMPLATES_PATH))
io_utils.create_dir(Path(EnvConstants.VARIABLES_PATH))
file = EnvConstants.VARIABLES_PATH + "/" + variables
try:
    # seed testinfo.json with this process' pid
    test_info_init["pid"] = os.getpid()
    io_utils.write_to_file_dict(Path(file), test_info_init)
except Exception as e:
    raise e
# dump environment and bind address for debugging / fluentd shipping
environ_dump = message_dumper.dump_message(dict(os.environ))
ip_port_dump = message_dumper.dump_message({"host": host, "port": port})
app.logger.debug({"msg": environ_dump})
app.logger.debug({"msg": ip_port_dump})
fluentd_utils.debug(fluentd_tag, environ_dump)
app.run(host=host, port=port)
class TestRunnerParallel:
    """Runs commands in parallel OS processes, mirroring TestRunner's
    per-command JSON status output via a multiprocessing Manager dict."""

    def __init__(self):
        self.__cmd_utils = CmdUtils()
        self.__io_utils = IOUtils()

    def run_command(self, manager_dict, dictionary, command):
        """Run one command and publish its status/timing into *manager_dict*.

        Executed inside a child Process: *dictionary* is that child's copy of
        the status dict; only the entry copied into *manager_dict* is visible
        to the parent.
        """
        print("Input json is: " + json.dumps(dictionary) + "\n")
        status_finished = "finished"
        status_in_progress = "in progress"
        key = command.strip()
        dictionary['commands'][key]['status'] = status_in_progress
        start = datetime.datetime.now()
        dictionary['commands'][key]['startedat'] = str(start)
        if platform.system() == "Windows":
            # Windows shell needs the command tokenised
            details = self.__cmd_utils.run_cmd_shell_true(command.split())
        else:
            details = self.__cmd_utils.run_cmd_shell_true([key])
        dictionary['commands'][key]['status'] = status_finished
        end = datetime.datetime.now()
        dictionary['commands'][key]['finishedat'] = str(end)
        dictionary['commands'][key]['duration'] = round((end - start).total_seconds())
        dictionary['commands'][key]['details'] = details
        manager_dict[key] = {}
        manager_dict[key] = dictionary['commands'][key]

    def run_commands(self, json_file, commands):
        """Run *commands* concurrently, merging results back into *json_file*.

        :param json_file: JSON status file (pre-populated; re-read then rewritten)
        :param commands: iterable of shell command strings
        """
        with Manager() as manager:
            manager_dict = manager.dict()
            command_dict = self.__io_utils.read_dict_from_file(json_file)
            command_dict['start_pid'] = os.getpid()
            start_total = datetime.datetime.now()
            procs = [
                Process(target=self.run_command,
                        args=(manager_dict, command_dict, command.strip()))
                for command in commands
            ]
            # BUG FIX: start ALL processes before joining any. The original
            # joined each process immediately after starting it, which ran
            # the commands one at a time instead of in parallel.
            for p in procs:
                p.start()
            for p in procs:
                p.join()
            command_dict['commands'] = dict(manager_dict)
            self.__io_utils.write_to_file_dict(json_file, command_dict)
            command_dict['finished'] = "true"
            command_dict['started'] = "false"
            end_total = datetime.datetime.now()
            command_dict['finishedat'] = str(end_total)
            command_dict['duration'] = round((end_total - start_total).total_seconds())
            self.__io_utils.write_to_file_dict(json_file, command_dict)