import boto3
import docopt


def get_status(client, tag_name, scheduled_action_name):
    """Gets the status of an autoscaling group.

    Keyword arguments:
    client -- boto3 autoscaling client
    tag_name -- tags used to identify autoscaling group
    scheduled_action_name -- name of the scheduled action to get status for
    """
    scaling_group = discover_asg(asg_client=client, tag_name=tag_name)

    desired_capacity = scaling_group['DesiredCapacity']
    instance_count = len(scaling_group['Instances'])
    scaling_group_name = scaling_group['AutoScalingGroupName']

    response = client.describe_scheduled_actions(
        AutoScalingGroupName=scaling_group_name,
        ScheduledActionNames=[scheduled_action_name])

    scheduled_desired_capacity = response['ScheduledUpdateGroupActions'][0][
        'DesiredCapacity']

    return {
        'scaling_group_name': scaling_group_name,
        'desired_capacity': desired_capacity,
        'instance_count': instance_count,
        'scheduled_desired_capacity': scheduled_desired_capacity
    }
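

# A minimal usage sketch, assuming a boto3 autoscaling client and the
# "jupyter-<type>-<namespace>" tag / "ensure_down" action names used further
# down in this file:
#
#     client = boto3.client("autoscaling")
#     status = get_status(client,
#                         tag_name="jupyter-t2-notebook",
#                         scheduled_action_name="ensure_down")
#     print("%(instance_count)d of %(desired_capacity)d instance(s) running"
#           % status)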


def print_status(client, tag_name, scheduled_action_name):
    # Only the tail of this function survives in the original snippet; the
    # name, signature and the lines above the if/else are reconstructed from
    # get_status() above.
    status = get_status(client, tag_name, scheduled_action_name)

    instance_str = (
        "%(instance_count)d instance(s) running; "
        "desired capacity is %(desired_capacity)d." % status)

    if status["scheduled_desired_capacity"] == 0:
        scheduled_action_str = "The instances will be turned off at 8pm this evening.\n"
    else:
        scheduled_action_str = "No overnight shutdown is scheduled.\n"

    print(instance_str)
    print(scheduled_action_str)


if __name__ == '__main__':
    args = docopt.docopt(__doc__)

    instance_type = args['--type'] or 't2'
    namespace = args['--namespace'] or 'notebook'
    tag_name = 'jupyter-%s-%s' % (instance_type, namespace)
    scheduled_action_name = 'ensure_down'

    client = boto3.client('autoscaling')
    scaling_group_name = discover_asg(
        asg_client=client, tag_name=tag_name)['AutoScalingGroupName']

    if args['--start']:
        set_asg_size(asg_client=client,
                     asg_name=scaling_group_name,
                     desired_size=1)

    if args['--stop']:
        set_asg_size(asg_client=client,
                     asg_name=scaling_group_name,
                     desired_size=0)

    if args['--enable-overnight']:
        update_desired_capacity_for_scheduled_action(
            client=client,
            scaling_group_name=scaling_group_name,
            # The call is truncated in the original; the remaining keyword
            # arguments below are assumed from the names defined above.
            scheduled_action_name=scheduled_action_name,
            desired_capacity=0)
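
# Example invocations, assuming this script is saved as notebook.py (the exact
# flag syntax depends on the docopt usage string, which is not shown here):
#
#     python notebook.py --start
#     python notebook.py --stop
#     python notebook.py --enable-overnight --type=t2 --namespace=notebook
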
Example #4
import os
import subprocess
import sys

import boto3
import docopt


def main():
    args = docopt.docopt(__doc__)

    key_path = args["--key"] or _default_ssh_key_path()
    namespace = args["--namespace"] or "notebook"

    print("Using SSH key at path %r" % key_path)

    assert os.path.exists(key_path)

    port = args["--port"] or "8888"

    instance_type = args["--type"] or "t2"
    tag_name = "jupyter-%s-%s" % (instance_type, namespace)

    asg_client = boto3.client("autoscaling")

    while True:
        asg_data = discover_asg(asg_client=asg_client, tag_name=tag_name)

        if not asg_data["Instances"]:
            _wait("No instances in ASG group %r" %
                  asg_data["AutoScalingGroupName"])
            continue

        in_service_instances = [
            inst for inst in asg_data["Instances"]
            if inst["LifecycleState"] == "InService"
        ]

        if not in_service_instances:
            _wait('No instances in ASG group %r are "InService"' %
                  asg_data["AutoScalingGroupName"])
            continue

        assert len(in_service_instances) > 0
        break

    instance_data = in_service_instances[0]
    instance_id = instance_data["InstanceId"]

    print("Looking up EC2 instance ID %r" % instance_id)

    ec2_client = boto3.client("ec2")
    resp = ec2_client.describe_instances(InstanceIds=[instance_id])

    try:
        instances = resp["Reservations"][0]["Instances"]
        ec2_data = instances[0]
        assert ec2_data["InstanceId"] == instance_id

        public_dns = ec2_data["PublicDnsName"]
    except (IndexError, KeyError) as err:
        print("Unexpected error parsing the EC2 response: %r" % err)
        sys.exit("resp=%r" % resp)

    print("Connecting to instance %r" % public_dns)

    try:
        subprocess.check_call([
            "ssh",
            # Use the provided SSH key to connect.
            "-i",
            key_path,
            # Create a tunnel to port 8888 (Jupyter) on the remote host
            "-L",
            "%s:%s:8888" % (port, public_dns),
            # Our data science AMI is based on Ubuntu
            "ubuntu@%s" % public_dns,
        ])
    except subprocess.CalledProcessError as err:
        sys.exit(err.returncode)
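
# Once main() establishes the tunnel, the Jupyter server listening on port
# 8888 on the instance is reachable locally at http://localhost:<port>
# (8888 by default).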