Example #1
    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        project = self.get_object()

        client = get_boto3_client("ssm", project.environment.get_creds())
        added_keys = []
        for service in project.services.filter(is_deleted=False):
            key_name = (
                f"{service.get_ssm_prefix()}{serializer.validated_data['key_name']}"
            )

            logger.info("Adding new variable: %s service_id=%s", key_name,
                        service.id)

            client.put_parameter(
                Name=key_name,
                Value=serializer.validated_data["key_value"],
                Type="SecureString" if serializer.validated_data.get(
                    "is_secure", True) else "String",
                Overwrite=True,
            )

            added_keys.append(key_name)

        headers = self.get_success_headers(serializer.data)
        return Response(added_keys,
                        status=status.HTTP_201_CREATED,
                        headers=headers)
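
Every example in this listing goes through the same get_boto3_client helper, which is not shown here. A minimal sketch, assuming AwsCredentials exposes access_key, secret_key and region attributes (as the later examples suggest); the real implementation may also handle sessions, assumed roles, or endpoint overrides:

import boto3


def get_boto3_client(service_name, aws_creds):
    """Build a boto3 client for the given AWS service from AwsCredentials.

    Hypothetical helper sketch, not the codebase's actual implementation.
    """
    return boto3.client(
        service_name,
        aws_access_key_id=aws_creds.access_key,
        aws_secret_access_key=aws_creds.secret_key,
        region_name=aws_creds.region,
    )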
Example #2
    def get_env_vars(self):
        """Get service env vars.

        Returns
        -------
        list of dict
        """
        client = get_boto3_client("ssm", self.project.environment.get_creds())
        env_vars = []
        for parameters in self.env_vars_generator(client):
            for param in parameters:
                param_details = client.get_parameter(
                    Name=param["Name"], WithDecryption=True
                )["Parameter"]
                env_vars.append(
                    dict(
                        name=param["Name"].split("/")[-1],
                        value_from=param["Name"],
                        value=param_details["Value"],
                        arn=param_details["ARN"],
                        kind=param_details["Type"],
                        last_modified=param["LastModifiedDate"],
                    )
                )

        return env_vars
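
The env_vars_generator used above (and again in Example #13) is not part of this listing. A possible sketch, assuming it is a method on the same service model and pages through SSM parameter metadata under the service's prefix; the per-parameter get_parameter call above suggests the generator yields metadata only, without values:

    def env_vars_generator(self, client, batch_size=50):
        """Yield batches of SSM parameter metadata under this service's prefix.

        Hypothetical sketch; the real generator may filter or page differently.
        """
        paginator = client.get_paginator("describe_parameters")
        pages = paginator.paginate(
            ParameterFilters=[{
                "Key": "Path",
                "Option": "Recursive",
                # Assumes get_ssm_prefix() returns a path like "/env/project/service/"
                "Values": [self.get_ssm_prefix().rstrip("/")],
            }],
            PaginationConfig={"PageSize": batch_size},
        )
        for page in pages:
            if page["Parameters"]:
                yield page["Parameters"]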
Example #3
def create_ssh_key(name: str, aws_creds: AwsCredentials) -> str:
    """Create new ssh key and return it's pem file.

    Parameters
    ----------
    name : str
        created key will have this name
    aws_creds : AwsCredentials
        aws credentials to create a connection via boto

    Returns
    -------
    str
        name of the created key (the PEM file is stored under KEYS_DIR)
    """
    pem_key_path = os.path.join(KEYS_DIR, f"{name}.pem")
    if os.path.exists(pem_key_path):
        logger.info("Pem key %s is in our system", name)
        return name

    ec2_client = get_boto3_client("ec2", aws_creds)
    key_pair = ec2_client.create_key_pair(KeyName=name)

    with open(
        os.path.join(KEYS_DIR, f"{key_pair['KeyName']}_fingerprint.txt"), "w"
    ) as fingerprint_file:
        fingerprint_file.write(key_pair["KeyFingerprint"])

    key_pair_path = os.path.join(KEYS_DIR, f"{key_pair['KeyName']}.pem")
    with open(key_pair_path, "w") as key_file:
        key_file.write(key_pair["KeyMaterial"])

    return name
Example #4
def put_task_definition(creds: AwsCredentials, params: GeneralConfiguration,
                        deploy_conf: DeploymentConf) -> Any:
    """Pushes task definition for the specified service."""
    logger.info("Put ECS task definition for: %s", DEMO_APP)

    client = get_boto3_client("ecs", creds)
    if deploy_conf.secrets:
        secrets = [
            dict(name=s.name, valueFrom=s.value_from)
            for s in deploy_conf.secrets
        ]
    else:
        secrets = []

    task_definition = client.register_task_definition(
        family=deploy_conf.service_name,
        executionRoleArn=deploy_conf.ecs_executor_role_arn,
        memory="250",
        containerDefinitions=[{
            "name": deploy_conf.service_name,
            "image": f"{deploy_conf.repo_url}:{deploy_conf.version}",
            "portMappings": [{
                "containerPort": deploy_conf.container_port,
                "protocol": "tcp",
            }],
            "secrets": secrets,
            "essential": True,
            "logConfiguration": {
                "logDriver": "awslogs",
                "options": {
                    "awslogs-group": f"{params.env_name}/{params.project_name}",
                    "awslogs-create-group": "true",
                    "awslogs-region": creds.region,
                    "awslogs-stream-prefix": deploy_conf.service_name,
                },
            },
        }],
        tags=[
            {"key": "Environment", "value": params.env_name},
            {"key": "Project", "value": "demo-api"},
        ],
    )

    logger.info(
        "Success putting ECS task definition: %s",
        task_definition["taskDefinition"]["taskDefinitionArn"],
    )
    return task_definition
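
The value returned here is the raw register_task_definition response; launch_task_in_cluster in Example #6 below reads task_definition["taskDefinition"]["taskDefinitionArn"] from it when creating or updating the ECS service.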
Example #5
def wait_for_service_scale(
    creds: AwsCredentials,
    cluster: str,
    service_name: str,
    desired_count: int,
    timeout_seconds: int = 1800,
    task_definition_arn: Optional[str] = None,
) -> None:
    """Wait for service scale."""
    logger.info(
        "wait_for_service_scale cluster=%s service_name=%s desired_count=%s timeout_seconds=%s task_definition_arn=%s",
        cluster,
        service_name,
        desired_count,
        timeout_seconds,
        task_definition_arn,
    )

    client = get_boto3_client("ecs", creds)
    waited_seconds = 0
    while waited_seconds <= timeout_seconds:
        logger.info(
            "Checking if service scaled to %s waited_seconds=%s",
            desired_count,
            waited_seconds,
        )
        resp = client.describe_services(cluster=cluster,
                                        services=[service_name])
        if not resp["services"]:
            logger.error("Service not found")
            return

        service = resp["services"][0]
        if task_definition_arn:
            logger.info("Selecting service with task definition arn: %s",
                        task_definition_arn)
            service = [
                d for d in service["deployments"]
                if d["taskDefinition"] == task_definition_arn
            ][0]

        logger.info("Scaling checking scale for service: %s", service)

        if int(service["runningCount"]) == desired_count:
            logger.info(
                "Services %s scaled to desired count of %d",
                service_name,
                desired_count,
            )
            return

        sleep(1)
        waited_seconds += 1

    raise Exception(f"Service {service_name} scale to "
                    f"desired count of {desired_count} TIMED OUT")
Example #6
def launch_task_in_cluster(
    creds: AwsCredentials,
    deploy_conf: DeploymentConf,
    task_definition: Any,
) -> Any:
    """Launch service in cluster."""
    logger.info(
        "Creating service for task definition: %s",
        task_definition["taskDefinition"]["taskDefinitionArn"],
    )
    client = get_boto3_client("ecs", creds)
    try:
        logger.info("Checking existing services")
        resp = client.describe_services(cluster=deploy_conf.ecs_cluster,
                                        services=[deploy_conf.service_name])
        if resp["services"]:
            logger.info("Updating existing %s service",
                        deploy_conf.service_name)
            client.update_service(
                cluster=deploy_conf.ecs_cluster,
                service=deploy_conf.service_name,
                taskDefinition=task_definition["taskDefinition"]
                ["taskDefinitionArn"],
                desiredCount=1,
                deploymentConfiguration={
                    "maximumPercent": 200,
                    "minimumHealthyPercent": 100,
                },
                forceNewDeployment=True,
            )
            return
    except botocore.exceptions.ClientError:
        # Cluster or service does not exist yet; fall through and create it.
        pass

    logger.info("Deploying new %s service", deploy_conf.service_name)
    response = client.create_service(
        cluster=deploy_conf.ecs_cluster,
        serviceName=deploy_conf.service_name,
        taskDefinition=task_definition["taskDefinition"]["taskDefinitionArn"],
        desiredCount=1,
        launchType="EC2",
        schedulingStrategy="REPLICA",
        deploymentController={"type": "ECS"},  # rolling update
        deploymentConfiguration={
            "maximumPercent": 200,
            "minimumHealthyPercent": 100
        },
        loadBalancers=[{
            "targetGroupArn": deploy_conf.target_group_arn,
            "containerName": deploy_conf.service_name,
            "containerPort": deploy_conf.container_port,
        }],
    )
    logger.debug("ECS create service response: %s", response)
Example #7
def get_ecs_ami_id(aws_creds: AwsCredentials) -> str:
    """Retrieve the ID of the official AWS ECS-optimized AMI for the region.

    Parameters
    ----------
    aws_creds : AwsCredentials
        AWS credentials used to create the boto3 connection

    Returns
    -------
    str
        AWS ECS-optimized AMI ID
    """
    ssm_client = get_boto3_client("ssm", aws_creds)
    param = ssm_client.get_parameter(
        Name="/aws/service/ecs/optimized-ami/amazon-linux-2/recommended/image_id",
    )
    return str(param["Parameter"]["Value"])
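
A typical use of the returned AMI ID is launching the container instances for the cluster. A hypothetical usage sketch (launch_ecs_instance, the t3.micro instance type, and the key_name parameter are placeholders, not part of the codebase):

def launch_ecs_instance(aws_creds: AwsCredentials, key_name: str) -> str:
    """Launch a single ECS-optimized EC2 instance and return its instance id.

    Hypothetical sketch; a real launch would also attach the instance profile,
    security groups, and user data that register the instance with a cluster.
    """
    ec2_client = get_boto3_client("ec2", aws_creds)
    resp = ec2_client.run_instances(
        ImageId=get_ecs_ami_id(aws_creds),
        InstanceType="t3.micro",
        KeyName=key_name,
        MinCount=1,
        MaxCount=1,
    )
    return resp["Instances"][0]["InstanceId"]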
Example #8
def remove_ecs_service(creds: AwsCredentials,
                       deploy_conf: DeploymentConf) -> None:
    """Remove ecs service from cluster."""
    logger.info("Removing %s from cluster %s", deploy_conf.service_name,
                deploy_conf.ecs_cluster)
    client = get_boto3_client("ecs", creds)

    logger.info("Get task definitions: %s", deploy_conf.service_name)
    task_definitions = client.list_task_definitions(
        familyPrefix=deploy_conf.service_name,
        sort="DESC",
    )
    if not task_definitions["taskDefinitionArns"]:
        logger.info("Found 0 task definitions for family %s",
                    deploy_conf.service_name)
    else:
        logger.info("Scaling down %s", SERVICE_NAME)
        client.update_service(
            cluster=deploy_conf.ecs_cluster,
            service=deploy_conf.service_name,
            taskDefinition=task_definitions["taskDefinitionArns"][0],
            desiredCount=0,
            deploymentConfiguration={
                "maximumPercent": 200,
                "minimumHealthyPercent": 100,
            },
            forceNewDeployment=True,
        )
        wait_for_service_scale(creds, deploy_conf.ecs_cluster,
                               deploy_conf.service_name, 0)

    logger.info("Deleting service %s", deploy_conf.service_name)
    client.delete_service(cluster=deploy_conf.ecs_cluster,
                          service=deploy_conf.service_name,
                          force=True)

    logger.info("Removing task definitions")
    for task_definition in task_definitions["taskDefinitionArns"]:
        client.deregister_task_definition(taskDefinition=task_definition)
        logger.info("Deregisted %s", task_definition)

    logger.info("Service removed")
Example #9
    def destroy(self, request, *args, **kwargs):
        if not request.data["key_name"]:
            return Response(
                data=dict(detail="Must provide key name"),
                status=status.HTTP_400_BAD_REQUEST,
            )

        service = self.get_object()
        client = get_boto3_client("ssm",
                                  service.project.environment.get_creds())

        key_name = f"{service.get_ssm_prefix()}{request.data['key_name']}"
        try:
            client.delete_parameter(Name=key_name)
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception:
            logger.exception("Failed to delete env vars: %s", key_name)
            return Response(
                data={
                    "detail":
                    "Error deleting key. Does this key exist? Check key name and try again."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
Example #10
    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        service = self.get_object()

        client = get_boto3_client("ssm",
                                  service.project.environment.get_creds())

        key_name = f"{service.get_ssm_prefix()}{serializer.validated_data['key_name']}"
        logger.info("Adding new variable: %s", key_name)
        # fmt: off
        client.put_parameter(
            Name=key_name,
            Value=serializer.validated_data["key_value"],
            Type="SecureString" if serializer.validated_data.get(
                "is_secure", True) else "String",
            Overwrite=True,
        )
        # fmt: on

        headers = self.get_success_headers(serializer.data)
        return Response(dict(key_name=key_name),
                        status=status.HTTP_201_CREATED,
                        headers=headers)
Example #11
def launch_build_worker(build_worker_id, exec_log_id):
    logger.info(
        "Launching build worker for build_worker_id=%s exec_log_id=%s",
        build_worker_id,
        exec_log_id,
    )
    worker = BuildWorker.objects.select_related(
        "service",
        "service__project",
        "service__project__environment",
        "project",
    ).get(id=build_worker_id)
    exec_log = ExecutionLog.objects.get(id=exec_log_id)

    creds = worker.project.environment.get_creds()
    common_conf = worker.project.get_common_conf(exec_log_id,
                                                 worker.service_id)
    build_worker_conf = BuildWorkerConfigs(
        ssh_key_name=worker.project.get_ssh_key_name(),
        aws_access_key_id=creds.access_key,
        aws_access_key_secret=creds.secret_key,
        env_name=worker.project.environment.name,
        code_version=exec_log.get_params()["version"],
        service_name=worker.service.name,
        dockerfile=worker.service.default_dockerfile_path,
        dockerfile_target=worker.service.default_dockerfile_target,
        ecr_url=worker.service.conf().ecr_repo_url,
        valid_until=(datetime.utcnow() + timedelta(minutes=5)).replace(
            tzinfo=timezone.utc).isoformat(),
    )

    try:
        infra = launch_build_worker_server(creds, common_conf,
                                           build_worker_conf)
        ec2_client = get_boto3_client("ec2", creds)
        waiter = ec2_client.get_waiter("instance_status_ok")
        waiter.wait(
            InstanceIds=[infra["instance_id"]["value"]],
            Filters=[{
                "Name": "instance-state-code",
                "Values": ["16"]
            }],
            WaiterConfig={
                "Delay": 15,
                "MaxAttempts": 20,
            },
        )
    except Exception:
        logger.exception(
            "Failed to launch build worker build_worker_id=%s exec_log_id=%s",
            build_worker_id,
            exec_log_id,
        )

        exec_log.mark_result(False)
        return False

    worker.launched_at = datetime.utcnow().replace(tzinfo=timezone.utc)
    worker.instance_id = infra["instance_id"]["value"]
    worker.public_ip = infra["instance_public_ip"]["value"]
    worker.ssh_key_name = worker.project.get_ssh_key_name()
    worker.save()

    exec_log.mark_result(True)

    logger.info(
        "Launched build worker build_worker_id=%s exec_log_id=%s",
        worker.id,
        exec_log_id,
    )
    return True
Example #12
    def post(self, request, *args, **kwargs):
        service = self.get_object()
        if not service.is_ready():
            return Response(
                data={"detail": "service is not ready"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        worker, created = BuildWorker.objects.get_or_create(
            service=service,
            organization=self.request.user.organization,
            project=service.project,
            is_deleted=False,
        )
        if not created and worker.instance_id:
            ec2_client = get_boto3_client(
                "ec2", service.project.environment.get_creds()
            )
            try:
                resp = ec2_client.describe_instance_status(
                    Filters=[
                        {
                            "Name": "instance-state-code",
                            "Values": ["16"],
                        },  # running state
                    ],
                    InstanceIds=[worker.instance_id],
                )
                if len(resp["InstanceStatuses"]) == 1:
                    logger.info("Worker is still alive. worker_id=%s", worker.id)
                    return Response(
                        data=dict(build=worker.slug, log=None),
                        status=status.HTTP_200_OK,
                    )
            except ClientError as e:
                error_code = e.response.get("Error", {}).get("Code")
                logger.debug("client error: %s", e.response)
                logger.warning("error checking instance status: %s", error_code)

        worker.is_deleted = True
        worker.deleted_at = datetime.utcnow().replace(tzinfo=timezone.utc)
        worker.save(update_fields=["is_deleted", "deleted_at"])

        worker = BuildWorker.objects.create(
            service=service,
            organization=self.request.user.organization,
            project=service.project,
            is_deleted=False,
        )

        exec_log = ExecutionLog.register(
            self.request.user.organization,
            ExecutionLog.ActionTypes.create,
            request.data,
            ExecutionLog.Components.build_worker,
            worker.id,
        )

        launch_build_worker.delay(worker.id, exec_log.id)

        return Response(
            data=dict(build=worker.slug, log=exec_log.slug),
            status=status.HTTP_201_CREATED,
        )
Example #13
def remove_env_vars(service):
    client = get_boto3_client("ssm", service.project.environment.get_creds())
    for env_vars in service.env_vars_generator(client, batch_size=10):
        names = [env_var["Name"] for env_var in env_vars]
        logger.info("Removing env vars: %s", names)
        client.delete_parameters(Names=names)
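
Note that SSM's delete_parameters accepts at most ten names per call, which is why the generator is asked for batches of ten here.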