def get_test_info():
    """Return the persisted test info enriched with a live process snapshot."""
    response_builder = HttpResponse()
    file_utils = IOUtils()
    info_file = EnvConstants.VARIABLES_PATH + "/" + "testinfo.json"
    try:
        # Seed the file with the initial structure on first access.
        if not Path(info_file).is_file():
            file_utils.write_to_file_dict(info_file, test_info_init)
        test_env_vars = json.loads(file_utils.read_file(info_file))
        # Attach the currently running processes to the payload.
        test_env_vars["processes"] = [proc.info for proc in psutil.process_iter(
            attrs=['pid', 'name', 'username', 'status'])]
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(
            json.dumps(response_builder.failure(
                ApiCodeConstants.GET_CONTAINER_TEST_INFO_FAILURE,
                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.GET_CONTAINER_TEST_INFO_FAILURE),
                exception,
                str(traceback.format_exc()))),
            404, mimetype="application/json")
    return Response(
        json.dumps(response_builder.success(
            ApiCodeConstants.SUCCESS,
            ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
            test_env_vars)),
        200, mimetype="application/json")
def delete_deployment_id(self, depl_id):
    """Bring down the docker-compose deployment `depl_id` and delete its folder."""
    depl_id = depl_id.strip()
    docker_utils = DockerUtils()
    depl_folder = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{depl_id}"
    file = f"{depl_folder}/docker-compose.yml"
    try:
        status = docker_utils.down(file)
        daemon_unreachable = "Cannot connect to the Docker daemon".lower() in status.get('err').lower()
        if daemon_unreachable:
            raise Exception(status.get('err'))
        app.logger.debug({"msg": status})
        status = docker_utils.ps(depl_id)
        # Drop the header line of `ps`; keep only the container rows.
        result = status.get('out').split("\n")[1:]
        DeploymentMetadataSingleton.get_instance().delete_metadata_for_deployment(depl_id)
        IOUtils.remove_directory(f"{depl_folder}")
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_STOP_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_STOP_FAILURE.value),
            e)
    return Response(
        json.dumps(HttpResponse().response(
            ApiCode.SUCCESS.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
            result)),
        200, mimetype="application/json")
def get_results_folder():
    """Zip the folder named in the 'Folder-Path' header and stream it back.

    Returns a 404 JSON failure when the header is missing, the folder does
    not exist, or zipping fails; otherwise sends /tmp/results.zip.
    """
    io_utils = IOUtils()
    http = HttpResponse()
    archive_name = "results"
    header_key = 'Folder-Path'
    folder_path = request.headers.get(f"{header_key}")
    if not folder_path:
        return Response(json.dumps(http.failure(ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        io_utils.zip_file(archive_name, folder_path)
    except FileNotFoundError as e:
        result = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.GET_FILE_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.GET_FILE_FAILURE),
                                                result,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    except Exception:
        # Was a bare `except:` -- narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed into a 404 response.
        result = "Exception({0})".format(sys.exc_info()[0])
        return Response(json.dumps(http.failure(ApiCodeConstants.FOLDER_ZIP_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.FOLDER_ZIP_FAILURE) % folder_path,
                                                result,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return flask.send_file(
        f"/tmp/{archive_name}.zip",
        mimetype='application/zip',
        as_attachment=True), 200
def test_stop():
    """Stop the running test by terminating the test process's children.

    Bug fix: the original built ``psutil.Process()`` with no argument, i.e.
    the *current* service process, so it terminated this service's children
    rather than the test's. It now uses the pid reported by get_test_info().
    """
    io_utils = IOUtils()
    process_utils = ProcessUtils(logger)
    http = HttpResponse()
    variables = "testinfo.json"
    id = json.loads(io_utils.read_file(os.environ.get('VARS_DIR') + f"/{variables}"))["id"]
    try:
        response = get_test_info()
        pid = json.loads(response.get_data()).get('message').get('pid')
        if not isinstance(pid, str):
            if psutil.pid_exists(int(pid)):
                parent = psutil.Process(int(pid))  # the test's process tree, not ours
                children = parent.children()
                for p in children:
                    p.terminate()
                _, alive = psutil.wait_procs(children, timeout=3,
                                             callback=process_utils.on_terminate)
                for p in alive:
                    # Escalate for processes that ignored SIGTERM.
                    p.kill()
    except Exception:
        exception = "Exception({0})".format(sys.exc_info()[0])
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_STOP_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_STOP_FAILURE) % id,
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return Response(
        json.dumps(http.success(ApiCodeConstants.SUCCESS,
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS), id)),
        200, mimetype="application/json")
def deploy_start(self):
    """Save the request body as a k8s deployment file and apply it with kubectl."""
    http = HttpResponse()
    kubectl_utils = KubectlUtils()
    fluentd_tag = "deploy_start"
    token = token_hex(8)
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{token}"
    file = f"{deploy_dir}/k8s-deployment.yml"
    try:
        IOUtils.create_dir(deploy_dir)
        payload = request.data.decode('utf-8')
        IOUtils.write_to_file(file, payload)
        status = kubectl_utils.up(f"{file}")
        self.fluentd.emit(tag=fluentd_tag, msg={"msg": status})
        if status.get('err'):
            raise Exception(status.get('err'))
    except Exception as e:
        raise ApiExceptionKubectl(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    # The token doubles as the deployment id handed back to the caller.
    return Response(
        json.dumps(http.response(ApiCode.SUCCESS.value,
                                 ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                                 token)),
        200, mimetype="application/json")
def env_clean_up(fluentd_utils, path=EnvInit.init.get(EnvConstants.DEPLOY_PATH), env_expire_in=1440):  # 1 day
    """Tear down and delete active deployments older than `env_expire_in` minutes."""
    fluentd_tag = 'docker_env_clean_up'
    message_dumper = MessageDumper()
    expiry = datetime.timedelta(minutes=env_expire_in)
    for deployment_id in (entry.get('id') for entry in DockerUtils.get_active_deployments()):
        directory = f"{path}/{deployment_id}"
        age = datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(directory))
        if age > expiry:
            result = DockerUtils.down(f"{directory}/docker-compose.yml")
            IOUtils.remove_directory(directory)
            fluentd_utils.emit(tag=fluentd_tag,
                               msg=message_dumper.dump_message({"action": f"{fluentd_tag}",
                                                                "out": result.get('out'),
                                                                "err": result.get('err')}))
def test_start(test_id):
    """Launch a detached test run identified by `test_id` from the request body commands."""
    test_id = test_id.strip()
    variables = "testinfo.json"
    start_py_path = str(Path(".").absolute()) + "/start.py"
    os.environ['TEMPLATE'] = "start.py"
    os.environ['VARIABLES'] = variables
    io_utils = IOUtils()
    cmd_utils = CmdUtils()
    http = HttpResponse()
    input_data = request.data.decode("UTF-8", "replace").strip()
    if not input_data:
        return Response(json.dumps(http.failure(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        # Drop any command containing `rm`, then trim the survivors.
        commands = io_utils.get_filtered_list_regex(input_data.split("\n"),
                                                    re.compile(r'(\s+|[^a-z]|^)rm\s+.*$'))
        commands = [cmd.strip() for cmd in commands]
        test_info_init["id"] = test_id
        io_utils.write_to_file_dict(EnvConstants.TEST_INFO_PATH, test_info_init)
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_START_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        os.chmod(start_py_path, stat.S_IRWXU)
        # argv shape expected by start.py: [start.py, test_id, command...]
        cmd_utils.run_cmd_detached([start_py_path, test_id] + commands)
    except Exception as e:
        result = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_START_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                                                result,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return Response(
        json.dumps(http.success(ApiCodeConstants.SUCCESS,
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS), test_id)),
        200, mimetype="application/json")
def send_request(self, app, request_object):
    """Send request_object to one app; record either its JSON or its binary payload.

    JSON replies are annotated with the app's address and appended to
    self.response_list; non-JSON replies (e.g. zipped files) are written to
    the download folder and flag the overall response as a zip.
    """
    resp = self.send_http_request(app, request_object=request_object)
    try:
        response = resp.json()
        response["homePageUrl"] = app.get('homePageUrl')
        response["ip_port"] = f"{app.get('ipAddr')}:{app.get('port')}"
        self.response_list.append(response)
    except Exception:
        # Body is not JSON -> treat it as a downloadable file. (Was a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        file_name = re.findall(
            ".*filename=(.+)", resp.headers["Content-Disposition"]
        )[0] if resp.headers.get("Content-Disposition") else ""
        self.__is_response_zip = True
        IOUtils.create_dir(
            f"{self.download_folder}/{self.__source_zip_folder}")
        IOUtils.write_to_file(
            f"{self.download_folder}/{self.__source_zip_folder}/{app.get('ipAddr')}_{app.get('port')}_{file_name}",
            resp.content)
def get_active_deployments():
    """Return an ActiveDeployment entry per deployment folder with a running container."""
    deployments = []
    deploy_path = EnvInit.init.get(EnvConstants.DEPLOY_PATH)
    for env_id in (folder.lower() for folder in IOUtils.get_list_dir(f"{deploy_path}")):
        # Skip the `ps` header row; keep only container lines.
        containers = DockerUtils.ps(env_id).get('out').split("\n")[1:]
        if any(env_id in container for container in containers):
            deployments.append(ActiveDeployment.docker_deployment(env_id.strip(), containers))
    return deployments
def execute_command():
    """Run the commands from the request body in-memory and return their results."""
    test_id = "none"
    variables = "commandinfo.json"
    os.environ['TEMPLATE'] = "start.py"
    os.environ['VARIABLES'] = variables
    io_utils = IOUtils()
    http = HttpResponse()
    input_data = request.data.decode("UTF-8", "replace").strip()
    if not input_data:
        return Response(json.dumps(http.failure(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        # Drop any command containing `rm`, then trim the survivors.
        commands = io_utils.get_filtered_list_regex(input_data.split("\n"),
                                                    re.compile(r'(\s+|[^a-z]|^)rm\s+.*$'))
        commands = [cmd.strip() for cmd in commands]
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.TEST_START_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.TEST_START_FAILURE) % test_id,
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        response = TestRunnerInMemory().run_commands(commands)
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.COMMAND_EXEC_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.COMMAND_EXEC_FAILURE),
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return Response(
        json.dumps(http.success(ApiCodeConstants.SUCCESS,
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
                                response)), 200, mimetype="application/json")
def folder_clean_up(path=EnvInit.init.get(EnvConstants.DEPLOY_PATH), delete_period=60):
    """Delete inactive deployment folders older than `delete_period` minutes.

    Returns the list of deleted folder names.
    """
    deleted_folders = []
    active = {item.get('id') for item in DockerUtils.get_active_deployments()}
    for folder in (entry.rstrip() for entry in IOUtils.get_list_dir(f"{path}")):
        if folder in active:
            continue  # never touch a live deployment
        age = datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(f"{path}/{folder}"))
        if age > datetime.timedelta(minutes=delete_period):
            shutil.rmtree(f"{path}/{folder}")
            deleted_folders.append(folder)
    return deleted_folders
def upload_file():
    """Write the raw request body to the path named by the 'File-Path' header."""
    io_utils = IOUtils()
    http = HttpResponse()
    header_key = 'File-Path'
    try:
        file_content = request.get_data()
        file_path = request.headers.get(f"{header_key}")
        if not file_path:
            return Response(json.dumps(http.failure(ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED,
                                                    ErrorCodes.HTTP_CODE.get(
                                                        ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                    ErrorCodes.HTTP_CODE.get(
                                                        ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                    str(traceback.format_exc()))), 404, mimetype="application/json")
        if not file_content:
            return Response(json.dumps(http.failure(ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED,
                                                    ErrorCodes.HTTP_CODE.get(
                                                        ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                    ErrorCodes.HTTP_CODE.get(
                                                        ApiCodeConstants.EMPTY_REQUEST_BODY_PROVIDED),
                                                    str(traceback.format_exc()))), 404, mimetype="application/json")
        io_utils.write_to_file_binary(file_path, file_content)
    except Exception as e:
        # Both the request parsing and the write are reported identically.
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.UPLOAD_TEST_CONFIG_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.UPLOAD_TEST_CONFIG_FAILURE),
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    return Response(
        json.dumps(http.success(ApiCodeConstants.SUCCESS,
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS),
                                ErrorCodes.HTTP_CODE.get(ApiCodeConstants.SUCCESS))), 200,
        mimetype="application/json")
def upload_file(self):
    """Write the raw request body to the path named by the 'File-Path' header."""
    http = HttpResponse()
    io_utils = IOUtils()
    header_key = 'File-Path'
    file_content = request.get_data()
    file_path = request.headers.get(f"{header_key}")
    if not file_path:
        missing_header = ErrorMessage.HTTP_CODE.get(ApiCode.HTTP_HEADER_NOT_PROVIDED.value) % header_key
        raise ApiExceptionDocker(ApiCode.HTTP_HEADER_NOT_PROVIDED.value,
                                 missing_header, missing_header)
    if not file_content:
        empty_body = ErrorMessage.HTTP_CODE.get(ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value)
        raise ApiExceptionDocker(ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value,
                                 empty_body, empty_body)
    try:
        io_utils.write_to_file_binary(file_path, file_content)
    except Exception as e:
        raise ApiExceptionDocker(ApiCode.UPLOAD_FILE_FAILURE.value,
                                 ErrorMessage.HTTP_CODE.get(ApiCode.UPLOAD_FILE_FAILURE.value),
                                 e)
    return Response(
        json.dumps(http.response(ApiCode.SUCCESS.value,
                                 ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                                 ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value))),
        200, mimetype="application/json")
def get_file():
    """Return the raw bytes of the file named by the 'File-Path' header."""
    io_utils = IOUtils()
    http = HttpResponse()
    header_key = 'File-Path'
    file_path = request.headers.get(f"{header_key}")
    if not file_path:
        return Response(json.dumps(http.failure(ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.HTTP_HEADER_NOT_PROVIDED) % header_key,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
    try:
        return io_utils.read_file_byte_array(file_path), 200
    except Exception as e:
        exception = "Exception({0})".format(e.__str__())
        return Response(json.dumps(http.failure(ApiCodeConstants.GET_FILE_FAILURE,
                                                ErrorCodes.HTTP_CODE.get(
                                                    ApiCodeConstants.GET_FILE_FAILURE),
                                                exception,
                                                str(traceback.format_exc()))), 404, mimetype="application/json")
def get_file(self):
    """Return the text content of the file named by the 'File-Path' header."""
    header_key = 'File-Path'
    file_path = request.headers.get(f"{header_key}")
    if not file_path:
        missing_header = ErrorMessage.HTTP_CODE.get(ApiCode.HTTP_HEADER_NOT_PROVIDED.value) % header_key
        raise ApiExceptionKubectl(ApiCode.HTTP_HEADER_NOT_PROVIDED.value,
                                  missing_header, missing_header)
    try:
        file_content = IOUtils.read_file(file_path)
    except Exception as e:
        raise ApiExceptionKubectl(ApiCode.GET_FILE_FAILURE.value,
                                  ErrorMessage.HTTP_CODE.get(ApiCode.GET_FILE_FAILURE.value),
                                  e)
    return Response(file_content, 200, mimetype="text/plain")
def receive_prepared_deployment_and_unpack(self):
    """Store an uploaded zip under a deployment dir and unpack it in place."""
    token = token_hex(8)
    io_utils = IOUtils()
    # Honour a caller-provided Deployment-Id header, else use the random token.
    deployment_id = request.headers.get("Deployment-Id").lower() if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file_path = f"{deploy_dir}/archive.zip"
    file_content = request.get_data()  # send here the complete env. The deployment template can be overridden at deploy start
    if not file_content:
        empty_body = ErrorMessage.HTTP_CODE.get(ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value)
        raise ApiExceptionDocker(ApiCode.EMPTY_REQUEST_BODY_PROVIDED.value,
                                 empty_body, empty_body)
    try:
        io_utils.create_dir(deploy_dir)
        io_utils.write_to_file_binary(file_path, file_content)
    except Exception as e:
        raise ApiExceptionDocker(ApiCode.UPLOAD_FILE_FAILURE.value,
                                 ErrorMessage.HTTP_CODE.get(ApiCode.UPLOAD_FILE_FAILURE.value),
                                 e)
    try:
        shutil.unpack_archive(file_path, deploy_dir)
        io_utils.remove_file(file_path)  # the zip is only a transport artifact
    except Exception as e:
        raise ApiExceptionDocker(ApiCode.FOLDER_UNZIP_FAILURE.value,
                                 ErrorMessage.HTTP_CODE.get(ApiCode.FOLDER_UNZIP_FAILURE.value),
                                 e)
    return Response(
        json.dumps(HttpResponse().response(ApiCode.SUCCESS.value,
                                           ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                                           deployment_id)),
        200, mimetype="application/json")
def set_metadata_for_deployment_from_file(self, deployment, file):
    """Load optional JSON metadata for `deployment` from `file` and cache it.

    Returns True when the metadata was stored, False when the deployment is
    already cached, the file content is not a dict, the cache is full, or
    the deployment name is empty.
    """
    if self.metadata.get(deployment) is not None:
        # Already in memory -- nothing to do.
        return False
    metadata = {}
    try:
        metadata = json.loads(IOUtils.read_file(file=file))
    except:  # metadata is optional; any read/parse failure is deliberately ignored
        pass
    if not isinstance(metadata, dict):
        return False
    has_room = len(self.metadata) < self.METADATA_SPACE_MAX_SIZE
    if has_room and deployment != "":
        self.metadata[deployment] = metadata
        return True
    return False
class TestRunner:
    """Runs shell commands sequentially, persisting per-command progress to a JSON file."""

    def __init__(self):
        # Helpers for command execution and file persistence.
        self.__cmd_utils = CmdUtils()
        self.__io_utils = IOUtils()

    def run_commands(self, json_file, test_id, commands):
        """Execute `commands` one by one, writing status/timing to `json_file` after each step.

        Returns the final command dict (also persisted to json_file).
        """
        start_total = datetime.datetime.now()
        status_finished = "finished"
        status_in_progress = "in progress"
        command_dict = test_info_init
        command_dict['id'] = test_id
        command_dict['pid'] = os.getpid()
        # Bug fix: dict.fromkeys(commands, {...}) shared ONE mutable dict
        # between every key, so mutating one command's entry mutated them
        # all. Build a distinct dict per command instead.
        input_data_dict = {command: {"status": "scheduled", "details": {}} for command in commands}
        command_dict["started"] = "true"
        command_dict["commands"] = input_data_dict
        command_dict["startedat"] = str(datetime.datetime.now())
        details = {}
        for command in commands:
            start = datetime.datetime.now()
            command_dict['commands'][command.strip()] = {"status": "scheduled", "details": {}}
            command_dict['commands'][command.strip()]['status'] = status_in_progress
            command_dict['commands'][command.strip()]['startedat'] = str(start)
            # Persist progress before running so observers see "in progress".
            self.__io_utils.write_to_file_dict(json_file, command_dict)
            try:
                if platform.system() == "Windows":
                    details[command.strip()] = self.__cmd_utils.run_cmd_shell_true(command.split())
                else:
                    details[command.strip()] = self.__cmd_utils.run_cmd_shell_true([command.strip()])
            except Exception as e:
                details[command.strip()] = "Exception({0})".format(e.__str__())
            command_dict['commands'][command.strip()]['status'] = status_finished
            end = datetime.datetime.now()
            command_dict['commands'][command.strip()]['finishedat'] = str(end)
            command_dict['commands'][command.strip()]['duration'] = round((end - start).total_seconds())
            command_dict['commands'][command.strip()]['details'] = details[command.strip()]
            self.__io_utils.write_to_file_dict(json_file, command_dict)
        command_dict['finished'] = "true"
        command_dict['started'] = "false"
        end_total = datetime.datetime.now()
        command_dict['finishedat'] = str(end_total)
        command_dict['duration'] = round((end_total - start_total).total_seconds())
        self.__io_utils.write_to_file_dict(json_file, command_dict)
        return command_dict
def start_deployment(self):
    """Start a docker-compose deployment from the raw request body.

    The body is first stored as a jinja2 template; when Eureka registration
    is configured and the body contains the {{app_ip_port}} and
    {{eureka_server}} placeholders, the template is rendered so the started
    apps can self-register. Raises ApiExceptionDocker on failure.
    """
    docker_utils = DockerUtils()
    token = token_hex(8)
    # Caller may pin the deployment id via the Deployment-Id header;
    # otherwise the random token is used as the id.
    deployment_id = request.headers.get("Deployment-Id").lower(
    ) if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    header_key = 'Eureka-Server'
    eureka_server_header = request.headers.get(f"{header_key}")
    config_env_vars = EnvStartupSingleton.get_instance(
    ).get_config_env_vars()
    input_data = request.data.decode('UTF-8').strip()
    # Fail fast when the docker daemon is unreachable.
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get(
            'err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.DOCKER_DAEMON_NOT_RUNNING.value), status.get('err'))
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(
            EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.MAX_DEPLOYMENTS_REACHED.value) %
            str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        template_file_name = f"deployment_{deployment_id}.yml"
        template_file_path = f"{EnvInit.init.get(EnvConstants.TEMPLATES_DIR)}/{template_file_name}"
        app.logger.debug({
            "msg": {
                "file": template_file_path,
                "file_content": f"{input_data}"
            }
        })
        IOUtils.write_to_file(template_file_path, input_data)
        IOUtils.create_dir(deploy_dir)
        EnvironmentSingleton.get_instance().set_env_var(
            EnvConstants.TEMPLATE, template_file_name)
        env_vars = EnvironmentSingleton.get_instance(
        ).get_env_and_virtual_env()
        render = Render(env_vars.get(EnvConstants.TEMPLATE),
                        env_vars.get(EnvConstants.VARIABLES))
        if config_env_vars.get(
                EnvConstants.EUREKA_SERVER) and config_env_vars.get(
                    EnvConstants.APP_IP_PORT):
            # if {{app_ip_port}} and {{eureka_server}} then register that instance too
            if '{{app_ip_port}}' in input_data and '{{eureka_server}}' in input_data:
                eureka_server = config_env_vars.get(
                    EnvConstants.EUREKA_SERVER)
                # header value overwrite the eureka server
                if eureka_server_header:
                    eureka_server = eureka_server_header
                input_data = render.get_jinja2env().get_template(
                    env_vars.get(EnvConstants.TEMPLATE)).render({
                        "deployment_id": f"{deployment_id}",
                        "eureka_server": eureka_server,
                        "app_ip_port": config_env_vars.get(
                            EnvConstants.APP_IP_PORT).split("/")[0]
                    })
        # The template on disk is only an intermediate artifact; drop it.
        os.remove(template_file_path) if os.path.exists(
            template_file_path) else None
        app.logger.debug(
            {"msg": {
                "file": file,
                "file_content": f"{input_data}"
            }})
        IOUtils.write_to_file(file, input_data) if input_data else None
        # Pull images then bring the stack up, detached from this request.
        CmdUtils.run_cmd_detached(
            rf'''docker-compose -f {file} pull && docker-compose -f {file} up -d'''
        )
    except Exception as e:
        # Best-effort teardown of anything partially started.
        app.logger.debug({"msg": docker_utils.down(file)})
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(
        IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(
        deployment_id, metadata)
    return Response(json.dumps(HttpResponse().response(
        ApiCode.SUCCESS.value,
        ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value), deployment_id)),
                    200,
                    mimetype="application/json")
def start_deployment_with_template(self, template, variables):
    """Render `template` with `variables` into a k8s deployment and apply it.

    Request-body JSON keys are exported as env vars (without overriding
    existing ones) so the template can reference them. Returns the new
    deployment token; raises ApiExceptionKubectl on failure.
    """
    http = HttpResponse()
    kubectl_utils = KubectlUtils()
    fluentd_tag = "deploy_start"
    try:
        input_json = request.get_json(force=True)
        for key, value in input_json.items():
            if key not in EnvironmentSingleton.get_instance().get_env():
                EnvironmentSingleton.get_instance().set_env_var(
                    str(key), str(value))
    except Exception as e:
        app.logger.debug(f"Exception: {e.__str__()}")
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.TEMPLATE, template.strip())
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.VARIABLES, variables.strip())
    app.logger.debug({
        "msg": {
            "template_file":
            EnvironmentSingleton.get_instance().get_env_and_virtual_env().
            get(EnvConstants.TEMPLATE)
        }
    })
    app.logger.debug({
        "msg": {
            "variables_file":
            EnvironmentSingleton.get_instance().get_env_and_virtual_env().
            get(EnvConstants.VARIABLES)
        }
    })
    token = token_hex(8)
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{token}"
    file = f"{deploy_dir}/k8s-deployment.yml"
    try:
        r = Render(
            EnvironmentSingleton.get_instance().get_env_and_virtual_env(
            ).get(EnvConstants.TEMPLATE),
            EnvironmentSingleton.get_instance().get_env_and_virtual_env().
            get(EnvConstants.VARIABLES))
        IOUtils.create_dir(deploy_dir)
        # Bug fix: removed a stray `IOUtils.write_to_file(file)` call that
        # was missing its content argument; one write of the rendered
        # template is enough.
        IOUtils.write_to_file(file, r.rend_template())
        status = kubectl_utils.up(f"{file}")
        self.fluentd.emit(tag=fluentd_tag, msg={"msg": status})
        if status.get('err'):
            # Bug fix: was status.get('error') -- a key that never exists,
            # so the raised Exception carried None instead of the stderr.
            raise Exception(status.get('err'))
        result = str(token)
    except Exception as e:
        raise ApiExceptionKubectl(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    return Response(json.dumps(
        http.response(ApiCode.SUCCESS.value,
                      ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                      result)), 200, mimetype="application/json")
from about import properties
from rest.api.constants.env_constants import EnvConstants
from rest.api.definitions import test_info_init
from rest.api.logginghelpers.message_dumper import MessageDumper
from rest.api.routes import app
from rest.api.routes import fluentd_utils
from rest.utils.io_utils import IOUtils

if __name__ == "__main__":
    # Service bootstrap: prepare template/variable dirs and seed testinfo.json.
    # NOTE(review): `os` and `Path` are used below but not imported in this
    # chunk -- presumably imported elsewhere in the original file; verify.
    port = properties["port"]
    host = '0.0.0.0'
    fluentd_tag = "startup"
    variables = "testinfo.json"
    message_dumper = MessageDumper()
    io_utils = IOUtils()
    if os.environ.get('PORT'):
        port = int(os.environ.get("PORT"))  # override port if set from env
    io_utils.create_dir(Path(EnvConstants.TEMPLATES_PATH))
    io_utils.create_dir(Path(EnvConstants.VARIABLES_PATH))
    file = EnvConstants.VARIABLES_PATH + "/" + variables
    try:
        # Record this process's pid so other endpoints can find it.
        test_info_init["pid"] = os.getpid()
        io_utils.write_to_file_dict(Path(file), test_info_init)
    except Exception as e:
        raise e
class TestRunnerParallel:
    """Runs shell commands in parallel OS processes, merging results via a Manager dict."""

    def __init__(self):
        # Helpers for command execution and file persistence.
        self.__cmd_utils = CmdUtils()
        self.__io_utils = IOUtils()

    def run_command(self, manager_dict, dictionary, command):
        """Run one command and publish its status/timing into manager_dict."""
        print("Input json is: " + json.dumps(dictionary) + "\n")
        status_finished = "finished"
        status_in_progress = "in progress"
        dictionary['commands'][command.strip()]['status'] = status_in_progress
        start = datetime.datetime.now()
        dictionary['commands'][command.strip()]['startedat'] = str(start)
        if platform.system() == "Windows":
            details = self.__cmd_utils.run_cmd_shell_true(command.split())
        else:
            details = self.__cmd_utils.run_cmd_shell_true([command.strip()])
        dictionary['commands'][command.strip()]['status'] = status_finished
        end = datetime.datetime.now()
        dictionary['commands'][command.strip()]['finishedat'] = str(end)
        dictionary['commands'][command.strip()]['duration'] = round(
            (end - start).total_seconds())
        dictionary['commands'][command.strip()]['details'] = details
        # Publish the finished entry to the shared dict in one assignment.
        manager_dict[command.strip()] = dictionary['commands'][command.strip()]

    def run_commands(self, json_file, commands):
        """Spawn one process per command, wait for all of them, persist the merged result."""
        with Manager() as manager:
            try:
                manager_dict = manager.dict()
                command_dict = self.__io_utils.read_dict_from_file(json_file)
                command_dict['start_pid'] = os.getpid()
                start_total = datetime.datetime.now()
                procs = [
                    Process(target=self.run_command,
                            args=(manager_dict, command_dict, command.strip()))
                    for command in commands
                ]
                # Bug fix: the original called p.join() in the same loop as
                # p.start(), which waited for each process before starting
                # the next -- i.e. serial execution. Start them all first,
                # then join them all, to get actual parallelism.
                for p in procs:
                    p.start()
                for p in procs:
                    p.join()
                command_dict['commands'] = dict(manager_dict)
                self.__io_utils.write_to_file_dict(json_file, command_dict)
                command_dict['finished'] = "true"
                command_dict['started'] = "false"
                end_total = datetime.datetime.now()
                command_dict['finishedat'] = str(end_total)
                command_dict['duration'] = round(
                    (end_total - start_total).total_seconds())
                self.__io_utils.write_to_file_dict(json_file, command_dict)
            except Exception as e:
                raise e
def __init__(self):
    """Create the command-execution and file I/O helpers used by this runner."""
    self.__cmd_utils = CmdUtils()
    self.__io_utils = IOUtils()
# Register the Flask view classes and their API error handlers.
DockerView.register(app=app)
KubectlView.register(app=app)
app.register_error_handler(ApiExceptionDocker, DockerView.handle_api_error)
app.register_error_handler(ApiExceptionKubectl, KubectlView.handle_api_error)

if __name__ == "__main__":
    cli = sys.modules['flask.cli']
    cli.show_server_banner = lambda *x: None  # silence Flask's startup banner
    fluentd_tag = "startup"
    host = '0.0.0.0'
    port = EnvStartupSingleton.get_instance().get_config_env_vars().get(EnvConstants.PORT)
    message_dumper = MessageDumper()
    io_utils = IOUtils()
    # Optional self-registration when a Eureka server is configured.
    if EnvStartupSingleton.get_instance().get_config_env_vars().get(EnvConstants.EUREKA_SERVER):
        Eureka(EnvStartupSingleton.get_instance().get_config_env_vars().get(EnvConstants.EUREKA_SERVER)).register_app(
            EnvStartupSingleton.get_instance().get_config_env_vars().get(EnvConstants.APP_IP_PORT),
            EnvStartupSingleton.get_instance().get_config_env_vars().get(EnvConstants.APP_APPEND_LABEL))
    io_utils.create_dirs([Path(EnvInit.init.get(EnvConstants.DEPLOY_PATH)),
                          Path(EnvInit.init.get(EnvConstants.TEMPLATES_DIR)),
                          Path(EnvInit.init.get(EnvConstants.VARS_DIR))])
    # Background scheduler that periodically expires old docker environments.
    DockerEnvExpireScheduler(fluentd_utils=DockerView.fluentd,
                             poll_interval=EnvStartupSingleton.get_instance().get_config_env_vars().get(
                                 EnvConstants.SCHEDULER_POLL_INTERVAL),  # seconds
                             env_expire_in=EnvStartupSingleton.get_instance().get_config_env_vars().get(
                                 EnvConstants.ENV_EXPIRE_IN)).start()  # minutes
def agents_request(text):
    """Fan a request out to all (or header-selected) registered estuary agents.

    Agents can be filtered with the optional IpAddr-Port / HomePageUrl
    headers. JSON replies are aggregated into one list; if any agent sent
    back a file, the collected files are zipped and streamed instead.
    """
    path = text.strip()
    eureka = Eureka(
        EnvStartupSingleton.get_instance().get_config_env_vars().get(
            EnvConstants.EUREKA_SERVER))
    ip_addr_port_header_key = 'IpAddr-Port'  # target specific agent
    home_page_url_header_key = 'HomePageUrl'  # target specific agent
    application = "agent"
    try:
        input_data = request.get_data()
    except:
        # Body is optional; fall back to an empty payload.
        input_data = ""
    try:
        request_object = {
            "uri": path.lstrip("/"),
            "method": request.method,
            "headers": request.headers,
            "data": input_data
        }
        app.logger.debug({"msg": f"{request_object}"})
        agent_apps = eureka.get_type_eureka_apps(application)
        if request.headers.get(f"{ip_addr_port_header_key}"):  # not mandatory
            ip_port_list = request.headers.get(
                f"{ip_addr_port_header_key}").split(",")
            agent_apps = [
                app for app in agent_apps
                if f"{app.get('ipAddr')}:{app.get('port')}" in ip_port_list
            ]
        if request.headers.get(f"{home_page_url_header_key}"):  # not mandatory
            home_page_url_list = request.headers.get(
                f"{home_page_url_header_key}").split(",")
            agent_apps = [
                app for app in agent_apps
                if f"{app.get('homePageUrl')}" in home_page_url_list
            ]
        th_utils = ThreadUtils(apps=agent_apps, headers={})
        # One thread per agent; responses are collected inside th_utils.
        th_utils.spawn_threads_send_request(request_object)
    except Exception as e:
        raise ApiException(
            ApiCode.DISCOVERY_ERROR.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DISCOVERY_ERROR.value), e)
    threads_response = th_utils.get_threads_response()
    if th_utils.is_response_zip():
        # NOTE(review): folder_path and archive_path are identical here, so
        # the archive is created at the same path as the source folder --
        # confirm this is what IOUtils.zip_file expects.
        folder_path = f"{th_utils.download_folder}/{th_utils.get_source_zip_folder()}"
        archive_path = f"{th_utils.download_folder}/{th_utils.get_source_zip_folder()}"
        try:
            IOUtils.zip_file(archive_path, folder_path)
        except Exception as e:
            raise ApiException(
                ApiCode.FOLDER_ZIP_FAILURE.value,
                ErrorMessage.HTTP_CODE.get(ApiCode.FOLDER_ZIP_FAILURE.value)
                % folder_path, e)
        IOUtils.delete_file(folder_path)
        return flask.send_file(f"{archive_path}.zip",
                               mimetype='application/zip',
                               as_attachment=True), 200
    return Response(json.dumps(HttpResponse().response(
        ApiCode.SUCCESS.value,
        ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
        threads_response)), 200, mimetype="application/json")
#!/usr/bin/env python3 import json import os import sys from rest.utils.io_utils import IOUtils from rest.utils.testrunner import TestRunner if __name__ == '__main__': io_utils = IOUtils() WORKSPACE = os.environ.get('WORKSPACE') if os.environ.get('WORKSPACE') else "tmp" VARIABLES_PATH = WORKSPACE + "/variables" COMMAND_LOGGER_PATH = WORKSPACE + "/commandlogger.txt" file_path = VARIABLES_PATH + "/testinfo.json" io_utils.append_to_file(COMMAND_LOGGER_PATH, " ".join(sys.argv)) min_args = 3 if len(sys.argv) < min_args: raise Exception( "Error: Expecting at least {} args. Got {}, args={}".format(min_args, len(sys.argv), sys.argv)) test_id = sys.argv[1] commands_list = sys.argv[2:] test_runner = TestRunner() dictionary = test_runner.run_commands(file_path, test_id, commands_list) dictionary = io_utils.read_dict_from_file(file_path) print(json.dumps(dictionary) + "\n")
def start_deployment_with_templates(self, template, variables):
    """Render a named template/variables pair into docker-compose and start it.

    Request-body JSON keys (if any) are exported as env vars for the
    renderer, without overriding existing ones. Raises ApiExceptionDocker
    on daemon/capacity/render failures.
    """
    http = HttpResponse()
    docker_utils = DockerUtils()
    token = token_hex(8)
    # Caller may pin the deployment id via the Deployment-Id header.
    deployment_id = request.headers.get("Deployment-Id").lower(
    ) if request.headers.get("Deployment-Id") else token
    deploy_dir = f"{EnvInit.init.get(EnvConstants.DEPLOY_PATH)}/{deployment_id}"
    file = f"{deploy_dir}/docker-compose.yml"
    try:
        input_json = request.get_json(force=True)
        for key, value in input_json.items():
            if key not in EnvironmentSingleton.get_instance().get_env():
                EnvironmentSingleton.get_instance().set_env_var(
                    str(key), str(value))
    except Exception as e:
        app.logger.debug(
            f"Could not parse the input from the request as JSON: {e.__str__()}"
        )
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.TEMPLATE, template.strip())
    EnvironmentSingleton.get_instance().set_env_var(
        EnvConstants.VARIABLES, variables.strip())
    env_vars = EnvironmentSingleton.get_instance().get_env_and_virtual_env(
    )
    app.logger.debug(
        {"msg": {
            "template_file": env_vars.get(EnvConstants.TEMPLATE)
        }})
    app.logger.debug(
        {"msg": {
            "variables_file": env_vars.get(EnvConstants.VARIABLES)
        }})
    # Fail fast when the docker daemon is unreachable.
    status = CmdUtils.run_cmd_shell_false(["docker", "ps"])
    if "Cannot connect to the Docker daemon".lower() in status.get(
            'err').lower():
        raise ApiExceptionDocker(
            ApiCode.DOCKER_DAEMON_NOT_RUNNING.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.DOCKER_DAEMON_NOT_RUNNING.value), status.get('err'))
    active_deployments = docker_utils.get_active_deployments()
    if len(active_deployments) >= EnvInit.init.get(
            EnvConstants.MAX_DEPLOYMENTS):
        raise ApiExceptionDocker(
            ApiCode.MAX_DEPLOYMENTS_REACHED.value,
            ErrorMessage.HTTP_CODE.get(
                ApiCode.MAX_DEPLOYMENTS_REACHED.value) %
            str(EnvInit.init.get(EnvConstants.MAX_DEPLOYMENTS)),
            active_deployments)
    try:
        r = Render(env_vars.get(EnvConstants.TEMPLATE),
                   env_vars.get(EnvConstants.VARIABLES))
        IOUtils.create_dir(deploy_dir)
        IOUtils.write_to_file(file, r.rend_template())
        # Pull images then bring the stack up, detached from this request.
        CmdUtils.run_cmd_detached(
            rf'''docker-compose -f {file} pull && 
docker-compose -f {file} up -d''')
    except Exception as e:
        raise ApiExceptionDocker(
            ApiCode.DEPLOY_START_FAILURE.value,
            ErrorMessage.HTTP_CODE.get(ApiCode.DEPLOY_START_FAILURE.value),
            e)
    DeploymentMetadataSingleton.get_instance() \
        .delete_metadata_for_inactive_deployments(DockerUtils.get_active_deployments())
    metadata = DeploymentReader.get_metadata_for_deployment(
        IOUtils.read_file(file=file))
    IOUtils.write_to_file_dict(f"{deploy_dir}/metadata.json", metadata)
    DeploymentMetadataSingleton.get_instance().set_metadata_for_deployment(
        deployment_id, metadata)
    return Response(json.dumps(
        http.response(ApiCode.SUCCESS.value,
                      ErrorMessage.HTTP_CODE.get(ApiCode.SUCCESS.value),
                      deployment_id)), 200, mimetype="application/json")
import platform

import psutil

from rest.utils.io_utils import IOUtils

# Static application metadata (name/version/license etc.).
properties = {
    "name": "Estuary-Discovery",
    "version": "4.2.4",
    "description": "Discover Estuary Apps registered with Eureka. Execute commands and tests with Estuary Agents.",
    "author": "Catalin Dinuta",
    "platforms": ["Linux", "Mac", "Windows"],
    "license": "Apache-2.0"
}

# Host/runtime facts, collected once at import time.
about_system = {
    "system": platform.system(),
    "platform": platform.platform(),
    "release": platform.release(),
    "version": platform.version(),
    "architecture": platform.architecture()[0],
    "machine": platform.machine(),
    # "/.dockerenv" only exists inside Docker containers.
    "layer": "Docker" if IOUtils.does_file_exist("/.dockerenv") else "Machine",
    "hostname": platform.uname().node,
    "cpu": platform.processor(),
    # Total RAM rounded to whole gigabytes.
    "ram": str(round(psutil.virtual_memory().total / (1024.0**3))) + " GB",
    "python": platform.python_version()
}