Example #1
def env_update(env_name, target=None, config=None, aws_key=None, aws_secret=None):
    settings = get_project_settings()
    report_async({"command": f"dg env update"}, settings=settings, status="start")

    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]

    data = {}
    if target is not None:
        data["target"] = target
    if config is not None:
        cliOptions = parse_env_config_options(config)
        try:
            configOptions = read_env_config_from_file(env_name, overrideOptions=cliOptions)
        except yaml.YAMLError as ex:
            print(f"Could not parse config file: {ex}")
            return
        data["config_options"] = configOptions
    if aws_key is not None:
        data["aws_key"] = aws_key
    if aws_secret is not None:
        data["aws_secret"] = aws_secret

    response = api.update_environment(projectName, envPk, data)
    Bcolors.okgreen("environment udpated succesfully")
    report_async({"command": f"dg env update"}, settings=settings, status="stop")
Example #2
File: auth.py Project: diggerhq/cli
def fetch_github_token():
    webbrowser.open(GITHUB_LOGIN_ENDPOINT)
    token = ""
    while len(token) < 1:
        Bcolors.warn("Please follow browser and paste token here")
        token = input()
    save_github_token(token)
    Bcolors.okgreen("Authentication successful!")
Example #3
File: auth.py Project: diggerhq/cli
 def wrapper(ctx, *args, **kwargs):
     if not os.path.exists(DIGGERTOKEN_FILE_PATH) and \
             not os.environ.get(DIGGER_ENV_TOKEN_NAME, None):
         Bcolors.fail("Authentication required, please run `dg auth`")
         return
     # TODO: figure out why such ctx is not working
     # return ctx.invoke(func, ctx.obj, *args, **kwargs)
     return func(*args, **kwargs)
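This wrapper is the inner function of an authentication decorator. A minimal sketch of how such a decorator might be assembled; the name require_auth is an assumption, and DIGGERTOKEN_FILE_PATH, DIGGER_ENV_TOKEN_NAME and Bcolors are assumed to come from the CLI's own modules:

import functools
import os

def require_auth(func):
    @functools.wraps(func)
    def wrapper(ctx, *args, **kwargs):
        # bail out unless a token file or a token environment variable is present
        if not os.path.exists(DIGGERTOKEN_FILE_PATH) and \
                not os.environ.get(DIGGER_ENV_TOKEN_NAME, None):
            Bcolors.fail("Authentication required, please run `dg auth`")
            return
        return func(*args, **kwargs)
    return wrapper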
Example #4
 def update_state(self):
     for service in self.services:
         if service.type == ProjectDetector.DOCKER:
             self.generate_docker(service)
         elif service.type == ProjectDetector.FRONTEND:
             self.generate_frontend(service)
         else:
             Bcolors.warn(f"Unknown type {service.type}")
Example #5
def sync():
    """
    Sync all current services with backend
    """
    settings = get_project_settings()
    projectName = settings["project"]["name"]
    services = settings["services"]
    for key, service in services.items():
        service["name"] = service["service_name"]
    servicesList = json.dumps(list(services.values()))
    api.sync_services(projectName, {"services": servicesList})
    Bcolors.okgreen("digger.yml services synced with backend successfully")
Example #6
def env_push(env_name, service, remote, aws_key=None, aws_secret=None, tag="latest", prompt=False):
    action = "push"
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")

    if service is None:
        questions = [
            {
                'type': 'list',
                'name': 'service_name',
                'message': 'Select Service',
                'choices': settings["services"].keys(),
            },
        ]

        answers = pyprompt(questions)

        service_key = answers["service_name"]
    else:
        service_key = service

    project_name = settings["project"]["name"]
    service_name = settings["services"][service_key]["service_name"]
    service_type = settings["services"][service_key]["service_type"]
    service_runtime = settings["services"][service_key]["lambda_runtime"]

    if service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
        envDetails = api.get_environment_details(project_name, env_name)
        envId = envDetails["pk"]
        response = api.get_last_infra_deployment_info(project_name, envId)
        infraDeploymentDetails = json.loads(response.content)

        if remote:
            os.environ["DOCKER_HOST"] = DOCKER_REMOTE_HOST

        docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]
        region = infraDeploymentDetails["region"]
        registry_endpoint = docker_registry.split("/")[0]
        credentials = retreive_aws_creds(project_name, env_name, aws_key=aws_key, aws_secret=aws_secret, prompt=prompt)
        os.environ["AWS_ACCESS_KEY_ID"] = credentials["aws_key"]
        os.environ["AWS_SECRET_ACCESS_KEY"] = credentials["aws_secret"]
        proc = subprocess.run(["aws", "ecr", "get-login-password", "--region", region, ], capture_output=True)
        docker_auth = proc.stdout.decode("utf-8")
        subprocess.run(["docker", "login", "--username", "AWS", "--password", docker_auth, registry_endpoint], check=True)
        subprocess.run(["docker", "push", f"{docker_registry}:{tag}"], check=True)
    elif service_type == ServiceType.NEXTJS:
        print(f"ServiceType is NextJS, do nothing for now.")
    else:
        Bcolors.warn(f"This service: {service_type} does not support push command, skipping ...")
        sys.exit(0)

    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
Example #7
def do_api(method, endpoint, data, stream=False, auth_token=None):
    if auth_token is not None:
        headers = {"Authorization": f"Token {auth_token}"}
    else:
        headers = {}
    response = requests.request(method=method,
                                stream=stream,
                                url=endpoint,
                                json=data,
                                headers=headers)
    if response.status_code // 100 != 2:
        Bcolors.fail("Request failed")
        raise ApiRequestException(response.content)
    return response
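A usage sketch; the endpoint URL and token are illustrative only. Passing auth_token makes the request carry the Token authorization header shown above:

response = do_api(
    "GET",
    "https://backend.example.com/api/projects/",
    data=None,
    auth_token="my-secret-token",
)
print(response.status_code)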
Example #8
def env_cost(env_name):

    settings = get_project_settings()
    report_async({"command": f"dg env cost"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    spinner = Halo(text="Estimating environment costs ...", spinner="dots")
    spinner.start()
    response = api.estimate_cost(projectName, envPk)
    spinner.stop()
    Bcolors.okgreen("Your cost estimates are shown below")
    print("--------------------------------")
    pprint(response.content)
    report_async({"command": f"dg env cost"}, settings=settings, status="complete")
Example #9
def project_init(name=None):
    action = "init"
    report_async({"command": f"dg project init"}, status="start")

    update_existing_yaml = False
    if os.path.exists("digger.yml"):
        Bcolors.warn("digger.yml found, would you like to initialize new project (Y/N)? ")
        answer = input()
        if answer.lower() == "n":
            Bcolors.fail("aborting ...")
            sys.exit(1)
        else:
            update_existing_yaml = True

    if name is None:
        defaultProjectName = os.path.basename(os.getcwd())
        questions = [
            {
                'type': 'input',
                'name': 'project_name',
                'message': 'Enter project name',
                'default': defaultProjectName,
                'validate': ProjectNameValidator
            },
        ]

        answers = pyprompt(questions)

        project_name = answers["project_name"]
    else:
        project_name = name

    # This will throw error if project name is invalid (e.g. project exists)
    api.create_project(project_name)

    spinner = Halo(text='Initializing project: ' + project_name, spinner='dots')
    spinner.start()
    if update_existing_yaml:
        settings = get_project_settings()
        settings["project"]["name"] = project_name
    else:
        settings = init_project(project_name)
    update_digger_yaml(settings)  
    spinner.stop()


    print("project initiated successfully")
    report_async({"command": f"dg project init"}, settings=settings, status="copmlete")
Example #10
def env_plan(env_name):

    settings = get_project_settings()
    report_async({"command": f"dg env plan"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    spinner = Halo(text="Planning environment ...", spinner="dots")
    spinner.start()
    response = api.plan_environment(projectName, envPk)
    spinner.stop()
    Bcolors.okgreen("Your environment plan is shown below")
    print("--------------------------------")
    data = json.loads(response.content)
    pprint(data["output"])
    report_async({"command": f"dg env plan"}, settings=settings, status="complete")
Example #11
def env_destroy(env_name, project_name=None, aws_key=None, aws_secret=None, prompt=True):

    settings = get_project_settings()
    report_async({"command": f"dg env destroy"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    if prompt:
        questions = [
            {
                'type': 'input',
                'name': 'sure',
                'message': 'Are you sure (Y/N)?'
            },
        ]

        answers = pyprompt(questions)
        if answers["sure"] != "Y":
            Bcolors.fail("aborting")
            sys.exit(1)

    # only call the destroy endpoint once the user has confirmed
    response = api.destroy_environment(projectName, envPk, {
        "aws_key": aws_key,
        "aws_secret": aws_secret
    })
    job = json.loads(response.content)

    # loading until infra status is complete
    spinner = Halo(text="destroying infrastructure ...", spinner="dots")
    spinner.start()
    while True:
        statusResponse = api.get_infra_destroy_job_info(projectName, job['job_id'])
        print(statusResponse.content)
        jobStatus = json.loads(statusResponse.content)
        if jobStatus["status"] == "DESTROYED":
            break
        elif jobStatus["status"] == "FAILED":
            Bcolors.fail("Could not destroy infrastructure")
            print(jobStatus["fail_message"])
            sys.exit(1)
        time.sleep(2)
    spinner.stop()


    print(f"Environment destroyed succesfully")
    report_async({"command": f"dg env destroy"}, settings=settings, status="complete")
Example #12
def env_sync_tform(env_name):
    settings = get_project_settings()
    report_async({"command": f"dg env sync-tform"}, settings=settings, status="start")
    project_name = settings["project"]["name"]
    services = settings["services"]
    env_path = f"digger-master/{env_name}"
    tform_path = f"{env_path}/terraform"
    target = settings["environments"][env_name]["target"]
    region = settings["environments"][env_name]["region"]
    Path(env_path).mkdir(parents=True, exist_ok=True)
    # remove any previously generated terraform before regenerating it
    shutil.rmtree(tform_path, ignore_errors=True)
    Path(tform_path).mkdir(parents=True, exist_ok=True)
    # tform generation
    spinner = Halo(text="Updating terraform ...", spinner="dots")
    spinner.start()
    download_terraform_files(project_name, env_name, region, target, services, tform_path)
    spinner.stop()
    Bcolors.okgreen("Terraform updated successfully")        
    report_async({"command": f"dg env sync-tform"}, settings=settings, status="complete")
Example #13
def env_apply(env_name, verbose):

    settings = get_project_settings()
    report_async({"command": f"dg env apply"}, settings=settings, status="start")
    projectName = settings["project"]["name"]
    envDetails = api.get_environment_details(projectName, env_name)
    envPk = envDetails["pk"]
    response = api.apply_environment(projectName, envPk)
    job = json.loads(response.content)

    # loading until infra status is complete
    print("creating infrastructure ...")
    spinner = Halo(text="", spinner="dots")
    spinner.start()

    if verbose:
        with api.stream_deployment_logs(projectName, job['job_id']) as r:
            for line in r.iter_lines():
                line = line.decode("utf-8")
                print(line)

    while True:
        statusResponse = api.get_infra_deployment_info(projectName, job['job_id'])
        print(statusResponse.content)
        jobStatus = json.loads(statusResponse.content)


        if jobStatus["status"] == "COMPLETED":
            break
        elif jobStatus["status"] == "FAILED":
            Bcolors.fail("Could not create infrastructure")
            print(jobStatus["fail_message"])
            sys.exit(1)
        time.sleep(2)
    spinner.stop()


    print("Deployment successful!")
    print(f"your deployment details:")
    pprint(jobStatus["outputs"])

    report_async({"command": f"dg env apply"}, settings=settings, status="complete")
Example #14
def env_list(project_name=None):
    settings = get_project_settings()
    report_async({"command": f"dg env list"}, settings=settings, status="start")

    if project_name is None:
        if "project" not in settings:
            Bcolors.fail("could not load project name from settings")
            Bcolors.fail("please pass project via --project-name parameter")
            sys.exit(1)
        project_name = settings["project"]["name"]

    response = api.get_project_environments(project_name)
    environments = json.loads(response.content)["results"]

    for env in environments:
        print(f">> {env['name']}")
        print(f"  -> pk={env['pk']}")
        print(f"  -> target={env['target']}")
        print(f"  -> region={env['region']}")
        print(f"  -> config_options={env['config_options']}")
        print(f"  -> aws_key={env['aws_key'][:4]}****{env['aws_key'][-4:]}")
    report_async({"command": f"dg env list"}, settings=settings, status="complete")
Example #15
def service_add():
    action = "add"
    report_async({"command": f"dg service add"}, status="start")
    # service_names = get_service_names()
    service_names = list(filter(lambda x: x != "digger-master" and os.path.isdir(x) and not x.startswith("."), os.listdir(os.getcwd())))

    if len(service_names) == 0:
        Bcolors.fail("No service directories found, try cloning a repo in here!")
        return

    questions = [
        {
            'type': 'list',
            'name': 'service_name',
            'message': 'select repository',
            'choices': service_names
        },
    ]

    answers = pyprompt(questions)
    service_name = answers["service_name"]
    service_key = service_name

    service_path = service_name
    serviceNameOk = re.fullmatch(r'[a-z-]{1,10}', service_name)
    if not serviceNameOk:
        Bcolors.warn("service names should be lowercase letters, hyphens and at most 10 characters")
        service_name = transform_service_name(service_name)
        Bcolors.warn(f"Updating name to: {service_name}")

    settings = get_project_settings()

    try:
        dockerfile_path = find_dockerfile(service_path)
    except CouldNotDetermineDockerLocation as e:
        print("Could not find dockerfile in root")
        dockerfile_path = dockerfile_manual_entry(service_path)


    settings["services"] = settings.get("services", {})
    settings["services"][service_key] = {
        "service_name": service_name,
        "path": service_path,
        "env_files": [],
        "publicly_accessible": True,
        "service_type": "container",
        "container_port": 8080,
        "health_check": "/",
        "dockerfile": dockerfile_path,
        "resources": {},
        "dependencies": {},
    }

    update_digger_yaml(settings)
    spin(1, "Updating DGL config ... ")

    print("Service added succesfully")
    report_async({"command": f"dg service add"}, settings=settings, status="complete")
Example #16
def project_generate_yml(name=None):
    action = "init"
    report_async({"command": f"dg project generate"}, status="start")

    update_existing_yaml = False
    if os.path.exists("digger.yml"):
        Bcolors.warn("digger.yml found, please remove before running command")
        sys.exit(0)

    if name is None:
        defaultProjectName = os.path.basename(os.getcwd())
        questions = [
            {
                'type': 'input',
                'name': 'project_name',
                'message': 'Enter project name',
                'default': defaultProjectName,
                'validate': ProjectNameValidator
            },
        ]

        answers = pyprompt(questions)

        project_name = answers["project_name"]
    else:
        project_name = name

    spinner = Halo(text='Generating project: ' + project_name, spinner='dots')
    spinner.start()
    response = api.generate_project(project_name)
    settings = json.loads(response.content)
    with open(digger_yaml(), "w") as f:
        ydump(settings, f)
    spinner.stop()

    print("project generated successfully")
    report_async({"command": f"dg project generate"}, settings=settings, status="complete")
Example #17
    def detect_service(self, path):

        Bcolors.warn("... Searching for digger.yml")
        dgtest = self.digger_test(path)
        if dgtest is not False:
            Bcolors.okgreen("digger.yml file found .. loading settings")
            return dgtest
        Bcolors.warn("[x] digger.yml not found")

        Bcolors.warn("... Searching for dockerfile")
        dockertest = self.docker_test(path)
        if dockertest is not False:
            return dockertest
        Bcolors.warn("[x] dockerfile not found")

        Bcolors.warn("... Searching for package.json")
        jstest = self.javascript_test(path)
        if jstest is not False:
            return jstest
        Bcolors.warn("[x] package.json not found")

        return Service(self.UNKNOWN, None)
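The individual detectors are not shown here. A hedged sketch of what one of them might look like; Service and ProjectDetector are the CLI's own classes (see Examples #4 and #17), and checking only the repository root for a Dockerfile is an assumption:

import os

def docker_test(self, path):
    # Hypothetical detector: report a Docker service if a root Dockerfile exists.
    if os.path.exists(os.path.join(path, "Dockerfile")):
        return Service(ProjectDetector.DOCKER, path)
    return False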
Example #18
    def perform_release(settings, env_name, service_key):
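        # NOTE: aws_key, aws_secret, tag and prompt are not parameters of this
        # helper; they appear to be captured from the enclosing command's scope.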
        project_name = settings["project"]["name"]
        service_name = settings["services"][service_key]["service_name"]
        service_type = settings["services"][service_key]["service_type"]
        service_path = settings["services"][service_key]["path"]
        service_runtime = settings["services"][service_key]["lambda_runtime"]
        envDetails = api.get_environment_details(project_name, env_name)
        envId = envDetails["pk"]
        region = envDetails["region"]

        # nextjs service doesn't have infra deployment stage
        if service_type != ServiceType.NEXTJS:
            response = api.get_last_infra_deployment_info(project_name, envId)
            infraDeploymentDetails = json.loads(response.content)
        credentials = retreive_aws_creds(project_name, env_name, aws_key=aws_key, aws_secret=aws_secret, prompt=prompt)
        awsKey = credentials["aws_key"]
        awsSecret = credentials["aws_secret"]
        envVars = {} #get_env_vars(env_name, service_key)

        spinner = Halo(text=f"deploying {service_name}...", spinner="dots")
        spinner.start()
        if service_type == ServiceType.WEBAPP:
            os.environ["AWS_ACCESS_KEY_ID"] = awsKey
            os.environ["AWS_SECRET_ACCESS_KEY"] = awsSecret
            build_directory = settings["services"][service_key]["build_directory"]
            # TODO: find better way to extract bucket name of webapp
            bucket_name = infraDeploymentDetails["terraform_outputs"][f"{service_name}_bucket_main"]["value"]

            subprocess.run(["aws", "s3", "sync", f"{build_directory}",  f"s3://{bucket_name}"], check=True)

            Bcolors.okgreen("Upload succeeded!")
        elif service_type == ServiceType.NEXTJS:
            print(f"ServiceType is NextJS, do nothing for now.")
        elif service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
            docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]
            lb_url = infraDeploymentDetails["outputs"]["services"][service_name]["lb_url"]
            region = infraDeploymentDetails["region"]

            response = api.deploy_to_infra({
                "environment_pk": f"{envId}",
                "cluster_name": f"{project_name}-{env_name}",
                "service_name": f"{service_name}",
                "task_name": f"{project_name}-{env_name}-{service_name}",
                "region": region,
                "image_url": f"{docker_registry}:{tag}",
                "tag": tag,
                "aws_key": awsKey,
                "aws_secret": awsSecret,
                "env_vars": envVars
            })

            output = json.loads(response.content)

            print(output["msg"])
            print(f"your deployment URL: http://{lb_url}")
        elif service_type == ServiceType.SERVERLESS and service_runtime != "Docker":
            # perform deployment for lambda functions that are not using docker runtime
            if service_runtime == "Node.js":
                print("Installing packages ...")
                subprocess.run(["npm", "i", "--prefix", service_path])
            elif service_runtime == "Python3.9":
                print("Installing packages ...")
                # needs more work .. we need to include python requirements folder into the zip path
                reqs_path = os.path.join(service_path, "requirements.txt")
                deps_path = service_path
                subprocess.run(["pip", "install", "--target", deps_path, "-r", reqs_path])

            lambda_handler = settings["services"][service_key]["lambda_handler"]
            response = deploy_lambda_function_code(
                project_name,
                env_name,
                service_name,
                region,
                service_path,
                lambda_handler,
                awsKey,
                awsSecret
            )
            print(f"lambda deployed successfully {response}")
            
        else:
            Bcolors.warn(f"Service type: {service_type} does not support release command, skipping ...")

        spinner.stop()
Example #19
def env_build(env_name, service, remote, context=None, tag="latest"):
    action = "build"
    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")


    if service is None:
        defaultProjectName = os.path.basename(os.getcwd())
        questions = [
            {
                'type': 'list',
                'name': 'service_name',
                'message': 'Select Service',
                'choices': settings["services"].keys(),
            },
        ]

        answers = pyprompt(questions)

        service_key = answers["service_name"]
    else:
        service_key = service

    project_name = settings["project"]["name"]
    service_name = settings["services"][service_key]["service_name"]
    service_type = settings["services"][service_key]["service_type"]
    service_runtime = settings["services"][service_key]["lambda_runtime"]
    service_path = settings["services"][service_key]["path"]
    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]
    exposeVarsAtBuild = envDetails["inject_env_variables_at_build_time"]

    if context is None:
        context = f"{service_path}/"
    
    envVars = api.environment_vars_list(project_name, envId)
    envVars = json.loads(envVars.content)["results"]
    
    serviceDetails = api.get_service_by_name(project_name, service_name)
    servicePk = serviceDetails["pk"]

    if service_type in [ServiceType.WEBAPP, ServiceType.NEXTJS]:
        build_command = settings["services"][service_key]["build_command"]

        envVarsWithOverrides = compute_env_vars_with_overrides(envVars, servicePk)
        # expose env variables
        for name, value in envVarsWithOverrides.items():
            os.environ[name] = value

        # run it in service context
        subprocess.run(["npm", "install", "--prefix", context], check=True)

        print(f"build command to execute: {build_command}")
        # ensure that && separator works as expected
        for cmd in build_command.split("&&"):
            current_cmd = cmd.strip().split(" ")
            if current_cmd[0] == "npm":
                current_cmd = current_cmd + ["--prefix", context]
            subprocess.run(current_cmd, check=True)

        subprocess.run("pwd", check=True)
        subprocess.run("ls -a", check=True)

    elif service_type == ServiceType.CONTAINER or (service_type == ServiceType.SERVERLESS and service_runtime == "Docker"):
        dockerfile = settings["services"][service_key]["dockerfile"]
        response = api.get_last_infra_deployment_info(project_name, envId)
        infraDeploymentDetails = json.loads(response.content)
        docker_registry = infraDeploymentDetails["outputs"]["services"][service_name]["docker_registry"]

        if remote:
            os.environ["DOCKER_HOST"] = DOCKER_REMOTE_HOST

        buildArgs = []
        if exposeVarsAtBuild:
            envVarsWithOverrides = compute_env_vars_with_overrides(envVars, servicePk)

            for name, value in envVarsWithOverrides.items():
                os.environ[name] = value
                buildArgs = buildArgs + ["--build-arg", name]

        docker_build_command = ["docker", "build", "-t", f"{project_name}-{service_name}:{tag}"] + \
                               buildArgs + \
                               ["-f", f"{dockerfile}", context]

        subprocess.run(docker_build_command, check=True)
        subprocess.run(["docker", "tag", f"{project_name}-{service_name}:{tag}", f"{docker_registry}:{tag}"], check=True)
    else:
        Bcolors.warn(f"This service type does not support build phase: {service_type}, skipping ...")
        sys.exit(0)

    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
Example #20
def env_vars_create(env_name, file, prompt=True, overwrite=False):
    """
        Update environment variables for an environment based on .yml file
        --overwrite forces overwriting of existing variables
    """
    action = "vars:create"
    if not os.path.exists(file):
        Bcolors.fail("File does not exist")
        sys.exit(1)

    settings = get_project_settings()
    report_async({"command": f"dg env {action}"}, settings=settings, status="start")

    project_name = settings["project"]["name"]
    if prompt and not overwrite:
        Bcolors.warn("Note: Environment update will fail if duplicate variables names exist. Proceed? (Y,N)")
        Bcolors.okgreen("Hint: If you wish to overwrite existing vars use the --overwrite option along with this command")

        answer = input()
        if answer.lower() != "y":
            Bcolors.fail("Aborting ...")
            sys.exit(1)

    try:
        with open(file) as vars_file:
            varsToCreate = yload(vars_file, Loader=Loader)
    except Exception as e:
        Bcolors.fail("Error while loading vars file")
        print(e)
        sys.exit(1)

    envDetails = api.get_environment_details(project_name, env_name)
    envId = envDetails["pk"]

    services = api.list_services(project_name)
    services = json.loads(services.content)["results"]
    servicesDict = {}
    for s in services:
        servicesDict[s["name"]] = s

    for serviceName, varItems in varsToCreate.items():
        if serviceName == "all":
            servicePk = None
        else:
            if serviceName not in servicesDict.keys():
                Bcolors.fail(f"serviceName not found in backend: {serviceName}")
                sys.exit(1)
            servicePk = servicesDict[serviceName]["pk"]

        Bcolors.okgreen(f"Creating vars for service: {serviceName}:")
        for varName, varValue in varItems.items():
            Bcolors.okgreen(f"> Creating var ({varName}, {varValue}) ...")
            response = api.environment_vars_create(
                project_name, 
                envId, 
                varName, 
                varValue, 
                servicePk,
                overwrite=overwrite
            )
            Bcolors.okgreen(f">> Created!")


    report_async({"command": f"dg env {action}"}, settings=settings, status="complete")
Example #21
def env_create(
    env_name, 
    target=None,
    region=None,
    aws_key=None,
    aws_secret=None,
    config=[],
    prompt=True
):

    try:
        env_name_validate(env_name)
    except ValueError as e:
        Bcolors.warn(str(e))
        sys.exit()

    # parsing config options

    cliOptions = parse_env_config_options(config)
    try:
        configOptions = read_env_config_from_file(env_name, overrideOptions=cliOptions)
    except yaml.YAMLError as ex:
        print(f"Could not read config file: {ex}")
        return

    targets = DiggerTargets.TARGETS
    settings = get_project_settings()
    report_async({"command": f"dg env create"}, settings=settings, status="start")
    project_name = settings["project"]["name"]

    if target is None:
        questions = [
            {
                'type': 'list',
                'name': 'target',
                'message': 'Select target',
                'choices': targets.keys()
            },
        ]

        answers = pyprompt(questions)
        target_key = answers["target"]
        target = targets[target_key]

        if target == "other":

            ok = "n"
            while (ok.lower() != "y"):
                print("Enter target: ", end="")
                target = input()
                print(f"Confirm Target {target} (Y/N)?", end="")
                ok = input()

        elif target_key not in [DiggerTargets.FARGATE, DiggerTargets.LAMBDA]:
            Bcolors.fail("This option is currently unsupported! Please try again")
            return
    # otherwise, the target passed via the CLI argument is used as-is

    if region is None:
        questions = [
            {
                'type': 'list',
                'name': 'region',
                'message': 'Select region',
                'choices': AWS_REGIONS,
                'default': "us-east-1"
            },
        ]
        answers = pyprompt(questions)
        region = answers["region"]

    if region not in AWS_REGIONS:
        Bcolors.fail("This region is not valid! Please try again")
        return

    credentials = retreive_aws_creds(project_name, env_name, aws_key=aws_key, aws_secret=aws_secret, prompt=prompt)
    aws_key = credentials["aws_key"]
    aws_secret = credentials["aws_secret"]

    spinner = Halo(text="Creating environment", spinner="dots")
    spinner.start()

    response = api.create_environment(project_name, {
        "name": env_name,
        "target": target,
        "region": region,
        "aws_key": aws_key,
        "aws_secret": aws_secret,
        "config_options": configOptions
    })
    spinner.stop()

    Bcolors.okgreen("Environment created successfully")
    Bcolors.okgreen(f"Use this command to run it: dg env apply {env_name}")