def env_update(env_name, target=None, config=None, aws_key=None, aws_secret=None):
    settings = get_project_settings()
    report_async({"command": "dg env update"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    data = {}
    if target is not None:
        data["target"] = target
    if config is not None:
        # options passed on the CLI override the ones read from the config file
        cliOptions = parse_env_config_options(config)
        try:
            configOptions = read_env_config_from_file(env_name, overrideOptions=cliOptions)
        except yaml.YAMLError as exc:
            print(f"Could not parse config file: {exc}")
            return
        data["config_options"] = configOptions
    if aws_key is not None:
        data["aws_key"] = aws_key
    if aws_secret is not None:
        data["aws_secret"] = aws_secret
    response = api.update_environment(projectName, envPk, data)
    Bcolors.okgreen("environment updated successfully")
    report_async({"command": "dg env update"}, settings=settings, status="stop")
def env_push(env_name, service, remote, aws_key=None, aws_secret=None, tag="latest", prompt=False):
    action = "push"
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")
    if service is None:
        questions = [
            {
                'type': 'list',
                'name': 'service_name',
                'message': 'Select Service',
                'choices': settings["services"].keys(),
            },
        ]
        answers = pyprompt(questions)
        service_key = answers["service_name"]
    else:
        service_key = service
    project_name = settings["project"]["name"]
    service_name = settings["services"][service_key]["service_name"]
    service_type = settings["services"][service_key]["service_type"]
    service_runtime = settings["services"][service_key]["lambda_runtime"]
    if service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
        envDetails = api.get_environment_details(project_name, env_name)
        envId = envDetails["pk"]
        response = api.get_last_infra_deployment_info(project_name, envId)
        infraDeploymentDetails = json.loads(response.content)
        if remote:
            os.environ["DOCKER_HOST"] = DOCKER_REMOTE_HOST
        docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]
        region = infraDeploymentDetails["region"]
        registry_endpoint = docker_registry.split("/")[0]
        credentials = retreive_aws_creds(project_name, env_name, aws_key=aws_key, aws_secret=aws_secret, prompt=prompt)
        os.environ["AWS_ACCESS_KEY_ID"] = credentials["aws_key"]
        os.environ["AWS_SECRET_ACCESS_KEY"] = credentials["aws_secret"]
        # obtain a short-lived ECR password, log docker in to the registry, then push the tagged image
        proc = subprocess.run(["aws", "ecr", "get-login-password", "--region", region], capture_output=True)
        docker_auth = proc.stdout.decode("utf-8")
        subprocess.run(["docker", "login", "--username", "AWS", "--password", docker_auth, registry_endpoint], check=True)
        subprocess.run(["docker", "push", f"{docker_registry}:{tag}"], check=True)
    elif service_type == ServiceType.NEXTJS:
        print("ServiceType is NextJS, do nothing for now.")
    else:
        Bcolors.warn(f"This service: {service_type} does not support push command, skipping ...")
        sys.exit(0)
    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
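# For reference, the ECR login performed in env_push mirrors the flow documented
# by AWS, usually written as a shell pipeline. This is only an illustrative
# sketch of the equivalent commands, not something this module executes:
#
#   aws ecr get-login-password --region <region> \
#     | docker login --username AWS --password-stdin <registry_endpoint>
#
# Piping the password via --password-stdin keeps it out of the process list.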
def env_vars_list(env_name):
    """
    List environment variables for an environment
    """
    action = "vars:list"
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")
    project_name = settings["project"]["name"]
    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]
    envVars = api.environment_vars_list(project_name, envId)
    envVars = json.loads(envVars.content)["results"]
    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
    pprint(envVars)
def env_cost(env_name):
    settings = get_project_settings()
    report_async({"command": "dg env cost"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    spinner = Halo(text="Estimating environment costs ...", spinner="dots")
    spinner.start()
    response = api.estimate_cost(projectName, envPk)
    spinner.stop()
    Bcolors.okgreen("Your cost estimates are shown below")
    print("--------------------------------")
    pprint(response.content)
    report_async({"command": "dg env cost"}, settings=settings, status="complete")
def env_plan(env_name):
    settings = get_project_settings()
    report_async({"command": "dg env plan"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    spinner = Halo(text="Planning environment ...", spinner="dots")
    spinner.start()
    response = api.plan_environment(projectName, envPk)
    spinner.stop()
    Bcolors.okgreen("Your environment plan is shown below")
    print("--------------------------------")
    data = json.loads(response.content)
    pprint(data["output"])
    report_async({"command": "dg env plan"}, settings=settings, status="complete")
def env_destroy(env_name, project_name=None, aws_key=None, aws_secret=None, prompt=True):
    settings = get_project_settings()
    report_async({"command": "dg env destroy"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    # ask for confirmation before triggering the destroy job
    if prompt:
        questions = [
            {
                'type': 'input',
                'name': 'sure',
                'message': 'Are you sure (Y/N)?'
            },
        ]
        answers = pyprompt(questions)
        if answers["sure"] != "Y":
            Bcolors.fail("aborting")
            sys.exit(1)
    response = api.destroy_environment(projectName, envPk, {
        "aws_key": aws_key,
        "aws_secret": aws_secret
    })
    job = json.loads(response.content)
    # poll until the infra destroy job reaches a terminal status
    spinner = Halo(text="destroying infrastructure ...", spinner="dots")
    spinner.start()
    while True:
        statusResponse = api.get_infra_destroy_job_info(projectName, job['job_id'])
        print(statusResponse.content)
        jobStatus = json.loads(statusResponse.content)
        if jobStatus["status"] == "DESTROYED":
            break
        elif jobStatus["status"] == "FAILED":
            Bcolors.fail("Could not destroy infrastructure")
            print(jobStatus["fail_message"])
            sys.exit(1)
        time.sleep(2)
    spinner.stop()
    print("Environment destroyed successfully")
    report_async({"command": "dg env destroy"}, settings=settings, status="complete")
def env_apply(env_name, verbose):
    settings = get_project_settings()
    report_async({"command": "dg env apply"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    response = api.apply_environment(projectName, envPk)
    job = json.loads(response.content)
    # poll until the infra deployment job reaches a terminal status
    print("creating infrastructure ...")
    spinner = Halo(text="", spinner="dots")
    spinner.start()
    if verbose:
        # stream deployment logs line by line while the job runs
        with api.stream_deployment_logs(projectName, job['job_id']) as r:
            for line in r.iter_lines():
                line = line.decode("utf-8")
                print(line)
    while True:
        statusResponse = api.get_infra_deployment_info(projectName, job['job_id'])
        print(statusResponse.content)
        jobStatus = json.loads(statusResponse.content)
        if jobStatus["status"] == "COMPLETED":
            break
        elif jobStatus["status"] == "FAILED":
            Bcolors.fail("Could not create infrastructure")
            print(jobStatus["fail_message"])
            sys.exit(1)
        time.sleep(2)
    spinner.stop()
    print("Deployment successful!")
    print("your deployment details:")
    pprint(jobStatus["outputs"])
    report_async({"command": "dg env apply"}, settings=settings, status="complete")
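# The polling loops in env_apply and env_destroy follow the same pattern: fetch
# the job status every couple of seconds until a terminal state is reached. A
# possible shared helper is sketched below; the name wait_for_job and its
# parameters are hypothetical and not part of the existing module.
def wait_for_job(fetch_status, success_status, failure_message, poll_interval=2):
    """Poll fetch_status until it returns success_status or FAILED.

    fetch_status is any callable returning an API response whose JSON body
    contains a "status" field (and "fail_message" on failure).
    """
    while True:
        statusResponse = fetch_status()
        jobStatus = json.loads(statusResponse.content)
        if jobStatus["status"] == success_status:
            return jobStatus
        if jobStatus["status"] == "FAILED":
            Bcolors.fail(failure_message)
            print(jobStatus["fail_message"])
            sys.exit(1)
        time.sleep(poll_interval)

# Hypothetical usage from env_apply:
#   jobStatus = wait_for_job(
#       lambda: api.get_infra_deployment_info(projectName, job['job_id']),
#       "COMPLETED",
#       "Could not create infrastructure",
#   )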
def env_describe(env_name):
    settings = get_project_settings()
    report_async({"command": "dg env details"}, settings=settings, status="start")
    project_name = settings["project"]["name"]
    env = api.get_environment_details(project_name, env_name)
    envId = env["pk"]
    response = api.get_last_infra_deployment_info(project_name, envId)
    infraDeploymentDetails = json.loads(response.content)
    print(f">> {env['name']}")
    print(f" -> pk={env['pk']}")
    print(f" -> target={env['target']}")
    print(f" -> region={env['region']}")
    print(f" -> config_options={env['config_options']}")
    # mask the AWS key, showing only the first and last four characters
    print(f" -> aws_key={env['aws_key'][:4]}****{env['aws_key'][-4:]}")
    pprint(infraDeploymentDetails)
    report_async({"command": "dg env details"}, settings=settings, status="complete")
def perform_release(settings, env_name, service_key, aws_key=None, aws_secret=None, tag="latest", prompt=False):
    project_name = settings["project"]["name"]
    service_name = settings["services"][service_key]["service_name"]
    service_type = settings["services"][service_key]["service_type"]
    service_path = settings["services"][service_key]["path"]
    service_runtime = settings["services"][service_key]["lambda_runtime"]
    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]
    region = envDetails["region"]
    # nextjs service doesn't have an infra deployment stage
    if service_type != ServiceType.NEXTJS:
        response = api.get_last_infra_deployment_info(project_name, envId)
        infraDeploymentDetails = json.loads(response.content)
    credentials = retreive_aws_creds(project_name, env_name, aws_key=aws_key, aws_secret=aws_secret, prompt=prompt)
    awsKey = credentials["aws_key"]
    awsSecret = credentials["aws_secret"]
    envVars = {}  # get_env_vars(env_name, service_key)
    spinner = Halo(text=f"deploying {service_name}...", spinner="dots")
    spinner.start()
    if service_type == ServiceType.WEBAPP:
        os.environ["AWS_ACCESS_KEY_ID"] = awsKey
        os.environ["AWS_SECRET_ACCESS_KEY"] = awsSecret
        build_directory = settings["services"][service_key]["build_directory"]
        # TODO: find better way to extract bucket name of webapp
        bucket_name = infraDeploymentDetails["terraform_outputs"][f"{service_name}_bucket_main"]["value"]
        subprocess.run(["aws", "s3", "sync", f"{build_directory}", f"s3://{bucket_name}"], check=True)
        Bcolors.okgreen("Upload succeeded!")
    elif service_type == ServiceType.NEXTJS:
        print("ServiceType is NextJS, do nothing for now.")
    elif service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
        docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]
        lb_url = infraDeploymentDetails["outputs"]["services"][service_name]["lb_url"]
        region = infraDeploymentDetails["region"]
        response = api.deploy_to_infra({
            "environment_pk": f"{envId}",
            "cluster_name": f"{project_name}-{env_name}",
            "service_name": f"{service_name}",
            "task_name": f"{project_name}-{env_name}-{service_name}",
            "region": region,
            "image_url": f"{docker_registry}:{tag}",
            "tag": tag,
            "aws_key": awsKey,
            "aws_secret": awsSecret,
            "env_vars": envVars
        })
        output = json.loads(response.content)
        print(output["msg"])
        print(f"your deployment URL: http://{lb_url}")
    elif service_type == ServiceType.SERVERLESS and service_runtime != "Docker":
        # perform deployment for lambda functions that are not using the docker runtime
        if service_runtime == "Node.js":
            print("Installing packages ...")
            subprocess.run(["npm", "i", "--prefix", service_path])
        elif service_runtime == "Python3.9":
            print("Installing packages ...")
            # needs more work .. we need to include the python requirements folder in the zip path
            reqs_path = os.path.join(service_path, "requirements.txt")
            deps_path = service_path
            subprocess.run(["pip", "install", "--target", deps_path, "-r", reqs_path])
        lambda_handler = settings["services"][service_key]["lambda_handler"]
        response = deploy_lambda_function_code(
            project_name,
            env_name,
            service_name,
            region,
            service_path,
            lambda_handler,
            awsKey,
            awsSecret
        )
        print(f"lambda deployed successfully {response}")
    else:
        Bcolors.warn(f"Service type: {service_type} does not support release command, skipping ...")
    spinner.stop()
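# For orientation, the fields read from the last infra deployment in
# perform_release imply a response shape roughly like the sketch below. This is
# illustrative only: the concrete values are hypothetical and other fields may
# exist.
#
#   {
#       "region": "us-east-1",
#       "outputs": {
#           "services": {
#               "<service_name>": {
#                   "docker_registry": "<account>.dkr.ecr.us-east-1.amazonaws.com/<repo>",
#                   "lb_url": "<load-balancer-dns>"
#               }
#           }
#       },
#       "terraform_outputs": {
#           "<service_name>_bucket_main": {"value": "<s3-bucket-name>"}
#       }
#   }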
def env_build(env_name, service, remote, context=None, tag="latest"):
    action = "build"
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")
    if service is None:
        questions = [
            {
                'type': 'list',
                'name': 'service_name',
                'message': 'Select Service',
                'choices': settings["services"].keys(),
            },
        ]
        answers = pyprompt(questions)
        service_key = answers["service_name"]
    else:
        service_key = service
    project_name = settings["project"]["name"]
    service_name = settings["services"][service_key]["service_name"]
    service_type = settings["services"][service_key]["service_type"]
    service_runtime = settings["services"][service_key]["lambda_runtime"]
    service_path = settings["services"][service_key]["path"]
    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]
    exposeVarsAtBuild = envDetails["inject_env_variables_at_build_time"]
    if context is None:
        context = f"{service_path}/"
    envVars = api.environment_vars_list(project_name, envId)
    envVars = json.loads(envVars.content)["results"]
    serviceDetails = api.get_service_by_name(project_name, service_name)
    servicePk = serviceDetails["pk"]
    if service_type in [ServiceType.WEBAPP, ServiceType.NEXTJS]:
        build_command = settings["services"][service_key]["build_command"]
        envVarsWithOverrides = compute_env_vars_with_overrides(envVars, servicePk)
        # expose env variables
        for name, value in envVarsWithOverrides.items():
            os.environ[name] = value
        # run it in service context
        subprocess.run(["npm", "install", "--prefix", context], check=True)
        print(f"build command to execute: {build_command}")
        # ensure that && separator works as expected
        for cmd in build_command.split("&&"):
            current_cmd = cmd.strip().split(" ")
            if current_cmd[0] == "npm":
                current_cmd = current_cmd + ["--prefix", context]
            subprocess.run(current_cmd, check=True)
        # debug output: show the working directory and its contents after the build
        subprocess.run(["pwd"], check=True)
        subprocess.run(["ls", "-a"], check=True)
    elif service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
        dockerfile = settings["services"][service_key]["dockerfile"]
        response = api.get_last_infra_deployment_info(project_name, envId)
        infraDeploymentDetails = json.loads(response.content)
        docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]
        if remote:
            os.environ["DOCKER_HOST"] = DOCKER_REMOTE_HOST
        buildArgs = []
        if exposeVarsAtBuild:
            envVarsWithOverrides = compute_env_vars_with_overrides(envVars, servicePk)
            for name, value in envVarsWithOverrides.items():
                os.environ[name] = value
                buildArgs = buildArgs + ["--build-arg", name]
        docker_build_command = ["docker", "build", "-t", f"{project_name}-{service_name}:{tag}"] + \
            buildArgs + \
            ["-f", f"{dockerfile}", context]
        subprocess.run(docker_build_command, check=True)
        subprocess.run(["docker", "tag", f"{project_name}-{service_name}:{tag}", f"{docker_registry}:{tag}"], check=True)
    else:
        Bcolors.warn(f"This service type does not support build phase: {service_type}, skipping ...")
        sys.exit(0)
    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
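# Example of how the build_command splitting in env_build behaves, assuming a
# hypothetical build_command of "npm install && npm run build" and a context of
# "client/". Each "&&"-separated segment becomes its own subprocess call, and
# npm commands get the service context appended via --prefix:
#
#   ["npm", "install", "--prefix", "client/"]
#   ["npm", "run", "build", "--prefix", "client/"]
#
# Non-npm segments are executed as-is, without the --prefix flag.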
def env_vars_create(env_name, file, prompt=True, overwrite=False):
    """
    Update environment variables for an environment based on a .yml file.
    --overwrite forces overwriting of existing variables
    """
    action = "vars:create"
    if not os.path.exists(file):
        Bcolors.fail("File does not exist")
        sys.exit(1)
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")
    project_name = settings["project"]["name"]
    if prompt and not overwrite:
        Bcolors.warn("Note: Environment update will fail if duplicate variable names exist. Proceed? (Y/N)")
        Bcolors.okgreen("Hint: If you wish to overwrite existing vars, use the --overwrite option along with this command")
        answer = input()
        if answer.lower() != "y":
            Bcolors.fail("Aborting ...")
            sys.exit(1)
    try:
        varsToCreate = yload(open(file), Loader=Loader)
    except Exception as e:
        Bcolors.fail("Error while loading vars file")
        print(e)
        sys.exit(1)
    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]
    services = api.list_services(project_name)
    services = json.loads(services.content)["results"]
    servicesDict = {}
    for s in services:
        servicesDict[s["name"]] = s
    for serviceName, varItems in varsToCreate.items():
        if serviceName == "all":
            servicePk = None
        else:
            if serviceName not in servicesDict.keys():
                Bcolors.fail(f"serviceName not found in backend: {serviceName}")
                sys.exit(1)
            servicePk = servicesDict[serviceName]["pk"]
        Bcolors.okgreen(f"Creating vars for service: {serviceName}:")
        for varName, varValue in varItems.items():
            Bcolors.okgreen(f"> Creating var ({varName}, {varValue}) ...")
            response = api.environment_vars_create(
                project_name,
                envId,
                varName,
                varValue,
                servicePk,
                overwrite=overwrite
            )
            Bcolors.okgreen(">> Created!")
    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
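# Illustrative vars file accepted by env_vars_create. The structure follows the
# parsing loop above: top-level keys are service names (or "all" for variables
# not tied to a specific service), mapping to variable name/value pairs. The
# service and variable names below are made up:
#
#   all:
#     LOG_LEVEL: info
#   api:
#     DATABASE_URL: postgres://user:pass@host:5432/dbname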