Example 1
    def __init__(self,
            region='us-west-1',
            s3_bucket=None,
            image_id=None,
            aws_key_name=None,
            iam_profile=None,
            **kwargs
            ):
        # find config file
        from doodad.ec2.autoconfig import AUTOCONFIG
        from doodad.ec2.credentials import AWSCredentials
        # Fall back to values from the autoconfig file for any argument left as None.
        s3_bucket = AUTOCONFIG.s3_bucket() if s3_bucket is None else s3_bucket
        image_id = AUTOCONFIG.aws_image_id(region) if image_id is None else image_id
        aws_key_name = AUTOCONFIG.aws_key_name(region) if aws_key_name is None else aws_key_name
        iam_profile = AUTOCONFIG.iam_profile_name() if iam_profile is None else iam_profile
        credentials = AWSCredentials(aws_key=AUTOCONFIG.aws_access_key(), aws_secret=AUTOCONFIG.aws_access_secret())
        security_group_ids = AUTOCONFIG.aws_security_group_ids()[region]
        security_groups = AUTOCONFIG.aws_security_groups()

        super(EC2AutoconfigDocker, self).__init__(
                s3_bucket=s3_bucket,
                image_id=image_id,
                aws_key_name=aws_key_name,
                iam_instance_profile_name=iam_profile,
                credentials=credentials,
                region=region,
                security_groups=security_groups,
                security_group_ids=security_group_ids,
                **kwargs
                )
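
A minimal usage sketch for the constructor above. The doodad.mode import path is an assumption (it is not shown in the example), the bucket and IAM profile names are hypothetical, and any argument left as None falls back to the autoconfig file.

# Usage sketch; the import path and the override values below are assumptions.
from doodad.mode import EC2AutoconfigDocker

# Rely entirely on the autoconfig file for this region:
mode = EC2AutoconfigDocker(region='us-west-1')

# Or override selected fields explicitly:
mode = EC2AutoconfigDocker(
    region='us-east-1',
    s3_bucket='my-experiment-bucket',   # hypothetical bucket
    iam_profile='my-ec2-role',          # hypothetical IAM instance profile
)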
Example 2
def sync_down(path, check_exists=True):
    # Inside a Docker container, cache files under /tmp; otherwise under the local log directory.
    is_docker = os.path.isfile("/.dockerenv")
    if is_docker:
        local_path = "/tmp/%s" % (path)
    else:
        local_path = "%s/%s" % (LOCAL_LOG_DIR, path)

    if check_exists and os.path.isfile(local_path):
        return local_path

    local_dir = os.path.dirname(local_path)
    os.makedirs(local_dir, exist_ok=True)

    if is_docker:
        # Inside Docker, export credentials from the autoconfig so boto3 can authenticate.
        from doodad.ec2.autoconfig import AUTOCONFIG
        os.environ["AWS_ACCESS_KEY_ID"] = AUTOCONFIG.aws_access_key()
        os.environ["AWS_SECRET_ACCESS_KEY"] = AUTOCONFIG.aws_access_secret()

    full_s3_path = os.path.join(AWS_S3_PATH, path)
    bucket_name, bucket_relative_path = split_s3_full_path(full_s3_path)
    try:
        bucket = boto3.resource('s3').Bucket(bucket_name)
        bucket.download_file(bucket_relative_path, local_path)
    except Exception as e:
        local_path = None
        print("Failed to sync! path: ", path)
        print("Exception: ", e)
    return local_path
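
A usage sketch for sync_down; the experiment path is made up for illustration, and the module-level names used above (LOCAL_LOG_DIR, AWS_S3_PATH, split_s3_full_path) are assumed to be defined in the surrounding module.

# Usage sketch; "my-experiment/run_0/params.pkl" is a hypothetical S3-relative path.
local_file = sync_down("my-experiment/run_0/params.pkl")
if local_file is None:
    print("Download failed; see the exception printed above.")
else:
    print("Parameters cached at", local_file)

# check_exists=False forces a re-download even if the file is already cached locally.
local_file = sync_down("my-experiment/run_0/params.pkl", check_exists=False)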
Example 3
def _collect_instances(region):
    client = boto3.client(
        "ec2",
        region_name=region,
        aws_access_key_id=AUTOCONFIG.aws_access_key(),
        aws_secret_access_key=AUTOCONFIG.aws_access_secret(),
    )
    print("Collecting instances in region", region)
    instances = [x['Instances'][0] for x in client.describe_instances(
        Filters=[
            {
                'Name': 'instance.group-name',
                'Values': [
                    AUTOCONFIG.aws_security_groups()[0],
                ]
            },
            {
                'Name': 'instance-state-name',
                'Values': [
                    'running'
                ]
            }
        ]
    )['Reservations']]
    for instance in instances:
        instance['Region'] = region
    return instances
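
A sketch of combining _collect_instances with a region list to get a flat view of all running instances; the REGIONS list below is a stand-in for the module-level constant used in Example 5.

# Sketch: gather running instances across several regions.
REGIONS = ["us-west-1", "us-east-1"]   # hypothetical region list

all_instances = []
for region in REGIONS:
    all_instances.extend(_collect_instances(region))

for inst in all_instances:
    # 'Region' was attached by _collect_instances; the other keys come straight
    # from the describe_instances response.
    print(inst["Region"], inst["InstanceId"], inst.get("PublicIpAddress", "-"))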
Example 4
def sync_down_folder(path):
    is_docker = os.path.isfile("/.dockerenv")
    if is_docker:
        local_path = "/tmp/%s" % (path)
    else:
        local_path = "%s/%s" % (LOCAL_LOG_DIR, path)

    local_dir = os.path.dirname(local_path)
    os.makedirs(local_dir, exist_ok=True)

    if is_docker:
        from doodad.ec2.autoconfig import AUTOCONFIG

        os.environ["AWS_ACCESS_KEY_ID"] = AUTOCONFIG.aws_access_key()
        os.environ["AWS_SECRET_ACCESS_KEY"] = AUTOCONFIG.aws_access_secret()

    full_s3_path = os.path.join(AWS_S3_PATH, path)
    bucket_name, bucket_relative_path = split_s3_full_path(full_s3_path)
    command = "aws s3 sync s3://%s/%s %s" % (
        bucket_name,
        bucket_relative_path,
        local_path,
    )
    print(command)
    stream = os.popen(command)
    output = stream.read()
    print(output)
    return local_path
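
A usage sketch for sync_down_folder, mirroring the single-file helper in Example 2; the folder path is hypothetical, and the AWS CLI is assumed to be installed since the function shells out to aws s3 sync.

# Usage sketch; "my-experiment/run_0" is a hypothetical S3-relative folder.
run_dir = sync_down_folder("my-experiment/run_0")
print("Synced files:", os.listdir(run_dir))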
Example 5
def get_clients():
    regions = REGIONS
    clients = []
    for region in regions:
        # One EC2 client per region; the region is attached as an attribute for later lookup.
        client = boto3.client(
            "ec2",
            region_name=region,
            aws_access_key_id=AUTOCONFIG.aws_access_key(),
            aws_secret_access_key=AUTOCONFIG.aws_access_secret(),
        )
        client.region = region
        clients.append(client)
    return clients
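
A sketch of using the per-region clients, for example to count running instances in each region; describe_instances and its Filters argument are standard boto3 EC2 calls, and the region attribute is the one attached in get_clients above.

# Sketch: count running instances per region using the clients built above.
for client in get_clients():
    reservations = client.describe_instances(
        Filters=[{"Name": "instance-state-name", "Values": ["running"]}]
    )["Reservations"]
    n_running = sum(len(r["Instances"]) for r in reservations)
    print(client.region, "running instances:", n_running)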