Example No. 1
    def generate(self, incident: str, **kwargs) -> List[str]:
        """Generates the commands that will be run on the host."""
        logger.debug("Generating osquery payload.")
        session = Session()

        # TODO check for existence before deployment
        # We run these commands with Diffy credentials so as to not pollute the on-instance credentials.
        creds = session.get_credentials()
        region = kwargs.get("region", CONFIG.get("DIFFY_PAYLOAD_OSQUERY_REGION"))
        key = kwargs.get("key", CONFIG.get("DIFFY_PAYLOAD_OSQUERY_KEY"))

        if not region:
            raise BadArguments(
                "DIFFY_PAYLOAD_OSQUERY_REGION required for use with OSQuery plugin."
            )

        if not key:
            raise BadArguments(
                "DIFFY_PAYLOAD_OSQUERY_KEY required for use with OSQuery plugin."
            )

        commands: List[str] = [
            f"export AWS_ACCESS_KEY_ID={creds.access_key}",
            f"export AWS_SECRET_ACCESS_KEY={creds.secret_key}",
            f"export AWS_SESSION_TOKEN={creds.token}",
            f"cd $(mktemp -d -t binaries-{incident}-`date +%s`-XXXXXX)",
            f"aws s3 --region {region} cp s3://{key} ./latest.tar.bz2 --quiet",
            "tar xvf latest.tar.bz2 &>/dev/null",
        ]

        commands += CONFIG.get("DIFFY_PAYLOAD_OSQUERY_COMMANDS")
        return commands
Example No. 2
def create_s3_session(url, connection={}):
    url = url_util.parse(url)
    if url.scheme != 's3':
        raise ValueError(
            'Can not create S3 session from URL with scheme: {SCHEME}'.format(
                SCHEME=url.scheme))

    # NOTE(opadron): import boto and friends as late as possible.  We don't
    # want to require boto as a dependency unless the user actually wants to
    # access S3 mirrors.
    from boto3 import Session  # type: ignore[import]
    from botocore.exceptions import ClientError  # type: ignore[import]

    s3_connection, s3_client_args = get_mirror_s3_connection_info(connection)

    session = Session(**s3_connection)
    # if no access credentials provided above, then access anonymously
    if not session.get_credentials():
        from botocore import UNSIGNED  # type: ignore[import]
        from botocore.client import Config  # type: ignore[import]

        s3_client_args["config"] = Config(signature_version=UNSIGNED)

    client = session.client('s3', **s3_client_args)
    client.ClientError = ClientError
    return client
Example No. 3
def create_s3_session(url):
    url = url_util.parse(url)
    if url.scheme != 's3':
        raise ValueError(
            'Can not create S3 session from URL with scheme: {SCHEME}'.format(
                SCHEME=url.scheme))

    # NOTE(opadron): import boto and friends as late as possible.  We don't
    # want to require boto as a dependency unless the user actually wants to
    # access S3 mirrors.
    from boto3 import Session

    session = Session()

    s3_client_args = {"use_ssl": spack.config.get('config:verify_ssl')}

    endpoint_url = os.environ.get('S3_ENDPOINT_URL')
    if endpoint_url:
        if urllib_parse.urlparse(endpoint_url, scheme=None).scheme is None:
            endpoint_url = '://'.join(('https', endpoint_url))

        s3_client_args['endpoint_url'] = endpoint_url

    # if no access credentials provided above, then access anonymously
    if not session.get_credentials():
        from botocore import UNSIGNED
        from botocore.client import Config

        s3_client_args["config"] = Config(signature_version=UNSIGNED)

    return session.client('s3', **s3_client_args)
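
The common thread in the two Spack-style helpers above is the fallback to anonymous access when session.get_credentials() returns nothing. Below is a minimal, hedged sketch of just that pattern; the bucket and object names are hypothetical, not taken from the original code.

import boto3
from botocore import UNSIGNED
from botocore.client import Config

def make_s3_client():
    session = boto3.Session()
    client_args = {}
    if not session.get_credentials():
        # No credentials were found, so fall back to unsigned (anonymous) requests.
        client_args["config"] = Config(signature_version=UNSIGNED)
    return session.client("s3", **client_args)

# Hypothetical public object, for illustration only.
s3 = make_s3_client()
s3.download_file("example-public-bucket", "path/to/object", "object.bin")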
Example No. 4
def printCurrCreds():
    session = Session()
    credentials = session.get_credentials()
    current_credentials = credentials.get_frozen_credentials()

    print(current_credentials.access_key)
    print(current_credentials.secret_key)
    print(current_credentials.token)
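
Printing raw keys is handy for debugging but risky elsewhere. A hedged variant that masks most of each value (the masking scheme is an assumption, not part of the original snippet):

from boto3 import Session

def print_current_creds_masked():
    creds = Session().get_credentials().get_frozen_credentials()
    for name, value in (("access_key", creds.access_key),
                        ("secret_key", creds.secret_key),
                        ("token", creds.token)):
        # Show only the last four characters of each credential, if present.
        print(name, "****" + value[-4:] if value else None)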
Example No. 5
def main(bucket):
    secrets_file = os.path.join(os.path.dirname(__file__), "..",
                                "aws_secrets.txt")
    if os.path.isfile(secrets_file):
        print(f"Loading AWS secrets from file {secrets_file}")

        from configparser import ConfigParser

        config = ConfigParser()
        config.read(secrets_file)

        for k, v in config.items():
            for x, y in v.items():
                var = str(x).upper()
                os.environ[var] = str(y)
    else:
        print("No AWS secrets file found. Loading from boto.")
        from boto3 import Session

        session = Session()
        credentials = session.get_credentials()
        current_credentials = credentials.get_frozen_credentials()

        os.environ["AWS_ACCESS_KEY_ID"] = current_credentials.access_key
        os.environ["AWS_SECRET_ACCESS_KEY"] = current_credentials.secret_key
        os.environ["AWS_SESSION_TOKEN"] = current_credentials.token

    if all(
            os.getenv(k, "") for k in [
                "AWS_ACCESS_KEY_ID",
                "AWS_SECRET_ACCESS_KEY",
                "AWS_SESSION_TOKEN",
            ]):
        print("AWS secrets found in env.")
    else:
        print("Warning: No AWS secrets found in env!")

    ray.init(address="auto")

    num_samples = 16
    results_per_second = 10 / 60
    trial_length_s = 300

    max_runtime = 650

    timed_tune_run(
        name="durable trainable",
        num_samples=num_samples,
        results_per_second=results_per_second,
        trial_length_s=trial_length_s,
        max_runtime=max_runtime,
        checkpoint_freq_s=10,  # Once every 10 seconds
        checkpoint_size_b=int(10 * 1000**2),  # 10 MB
        keep_checkpoints_num=2,
        resources_per_trial={"cpu": 2},
        sync_config=tune.SyncConfig(upload_dir=f"s3://{bucket}/durable/"),
    )
Example No. 6
def gen_profile_text(
    session: boto3.Session,
    allowed_role_arn: str,
    allowed_eid_role_arn: str,
    denied_role_arn: str,
) -> str:
    creds = session.get_credentials()
    region = session.region_name

    return f"""\
Example No. 7
def get_credentials_from_profile(AWS_PROFILE):
    """Will use AWS_PROFILE to fetch a Session and retrieve Access Key and Secret Key
    
    Positional arguments:
    AWS_PROFILE -- (string) name of local AWS Profile
    """
    from boto3 import Session, setup_default_session

    setup_default_session(profile_name=AWS_PROFILE)

    session = Session()
    credentials = session.get_credentials()
    current_credentials = credentials.get_frozen_credentials()
    return (current_credentials.access_key, current_credentials.secret_key)
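
A hedged usage sketch of the helper above; the profile name is hypothetical:

access_key, secret_key = get_credentials_from_profile("my-profile")
print("Using access key ending in", access_key[-4:])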
Example No. 8
def setup_aws_creds():
    session = Session()
    creds = session.get_credentials()
    if not creds:
        print_error_msg('''
AWS credentials not configured.
configure through awscli or through orcl
orcl configure set -k aws_access_key_id -v <access_key_id>
orcl configure set -k aws_secret_access_key -v <aws_secret_access_key>
orcl configure set -k region -v <region>
''')
        sys.exit(1)

    frozen_creds = creds.get_frozen_credentials()
    os.environ["AWS_ACCESS_KEY_ID"] = frozen_creds.access_key
    os.environ["AWS_SECRET_ACCESS_KEY"] = frozen_creds.secret_key
Example No. 9
    def CrearConexionS3(self):

        from boto3 import Session
        session = Session()

        credentials = session.get_credentials()
        current_credentials = credentials.get_frozen_credentials()

        s3 = boto3.client(
            's3',
            aws_access_key_id=current_credentials.access_key,
            aws_secret_access_key=current_credentials.secret_key,
            aws_session_token=current_credentials.token,
            region_name='us-west-2',  # Oregon
            use_ssl=False)

        return s3
Example No. 10
def setup_aws_environ():
    """Set up aws configuration attributes in environment"""
    session = Session()
    creds = session.get_credentials()
    if not creds or not session.region_name:
        print_error_msg('''
AWS credentials not configured.
configure through awscli or through orcl
orcl configure set -k aws_access_key_id -v <access_key_id>
orcl configure set -k aws_secret_access_key -v <aws_secret_access_key>
orcl configure set -k region -v <region>
''')
        sys.exit(1)

    frozen_creds = creds.get_frozen_credentials()
    os.environ["AWS_ACCESS_KEY_ID"] = frozen_creds.access_key
    os.environ["AWS_SECRET_ACCESS_KEY"] = frozen_creds.secret_key
    os.environ["AWS_REGION"] = session.region_name
Example No. 11
def get_aws_credentials(aws_profile: str,
                        config: Optional[SHConfig] = None) -> SHConfig:
    """Collects credentials from AWS profile and adds them to an instance of SHConfig.

    :param aws_profile: A name of AWS profile
    :param config: If existing config object is given credentials will be added to its copy, otherwise a new config
        object will be created.
    :return: A config object with AWS credentials that have been loaded from AWS profile.
    """
    config = config.copy() if config else SHConfig()

    aws_session = Session(profile_name=aws_profile)
    aws_credentials = aws_session.get_credentials()

    config.aws_access_key_id = aws_credentials.access_key or ""
    config.aws_secret_access_key = aws_credentials.secret_key or ""
    config.aws_session_token = aws_credentials.token or ""
    return config
Example No. 12
def create_s3_session(url, connection={}):
    url = url_util.parse(url)
    if url.scheme != 's3':
        raise ValueError(
            'Can not create S3 session from URL with scheme: {SCHEME}'.format(
                SCHEME=url.scheme))

    # NOTE(opadron): import boto and friends as late as possible.  We don't
    # want to require boto as a dependency unless the user actually wants to
    # access S3 mirrors.
    from boto3 import Session
    from botocore.exceptions import ClientError

    s3_connection = {}

    if connection:
        if connection['access_token']:
            s3_connection["aws_session_token"] = connection["access_token"]
        if connection["access_pair"][0]:
            s3_connection["aws_access_key_id"] = connection["access_pair"][0]
            s3_connection["aws_secret_access_key"] = connection["access_pair"][
                1]
        if connection["profile"]:
            s3_connection["profile_name"] = connection["profile"]

    session = Session(**s3_connection)
    s3_client_args = {"use_ssl": spack.config.get('config:verify_ssl')}

    endpoint_url = os.environ.get('S3_ENDPOINT_URL')
    if endpoint_url:
        s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
    elif connection and 'endpoint_url' in connection:
        s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(
            connection["endpoint_url"])  # noqa: E501
    # if no access credentials provided above, then access anonymously
    if not session.get_credentials():
        from botocore import UNSIGNED
        from botocore.client import Config

        s3_client_args["config"] = Config(signature_version=UNSIGNED)

    client = session.client('s3', **s3_client_args)
    client.ClientError = ClientError
    return client
Example No. 13
    def generate(self, incident: str, **kwargs) -> List[str]:
        """Generates the commands that will be run on the host."""
        logger.debug("Generating osquery payload.")
        session = Session()

        # If osquery isn't present, obtain an osquery binary from S3.
        if not which("osqueryi"):
            # We run these commands with Diffy credentials so as to not pollute
            # the on-instance credentials.
            creds = session.get_credentials()
            region = kwargs.get("region", CONFIG.get("DIFFY_PAYLOAD_OSQUERY_REGION"))
            key = kwargs.get("key", CONFIG.get("DIFFY_PAYLOAD_OSQUERY_KEY"))

            if not region:
                raise BadArguments(
                    "DIFFY_PAYLOAD_OSQUERY_REGION required for use with OSQuery plugin."
                )

            if not key:
                raise BadArguments(
                    "DIFFY_PAYLOAD_OSQUERY_KEY required for use with OSQuery plugin."
                )

            # If we've downloaded our own osquery collection binary, create a
            # symbolic link, allowing us to use relative commands elsewhere.
            commands: List[str] = [
                f"export AWS_ACCESS_KEY_ID={creds.access_key}",
                f"export AWS_SECRET_ACCESS_KEY={creds.secret_key}",
                f"export AWS_SESSION_TOKEN={creds.token}",
                f"cd $(mktemp -d -t binaries-{incident}-`date +%s`-XXXXXX)",
                f"aws s3 --region {region} cp s3://{key} ./latest.tar.bz2 --quiet",
                "tar xvf latest.tar.bz2 &>/dev/null",
                "export PATH=${PATH}:${HOME}/.local/bin",
                "mkdir -p ${HOME}/.local/bin",
                "ln -s ./usr/bin/osqueryi ${HOME}/.local/bin/osqueryi",
            ]
        else:
            commands = [
                f"cd $(mktemp -d -t binaries-{incident}-`date +%s`-XXXXXX)"
            ]

        commands += CONFIG.get("DIFFY_PAYLOAD_OSQUERY_COMMANDS")
        return commands
Example No. 14
def display_access_keys(session: boto3.Session):
    target_key = session.get_credentials().access_key
    iam = session.client('iam')
    response = iam.list_access_keys()['AccessKeyMetadata']
    safe_print('Displaying Access Keys'.center(45, '='))
    safe_print('ACCESS KEY ID'.ljust(20), end='  ')
    safe_print('STATUS'.ljust(10), end='  ')
    safe_print('CREATE DATE'.ljust(10))
    for key_metadata in response:
        color = None if target_key != key_metadata.get(
            'AccessKeyId') else colorama.Fore.MAGENTA
        safe_print(key_metadata.get('AccessKeyId'), end='  ', color=color)
        safe_print('[{}]'.format(key_metadata.get('Status')).ljust(10),
                   end='  ',
                   color=color)
        safe_print('{}'.format(
            key_metadata.get('CreateDate').strftime('%d/%m/%Y')).ljust(10),
                   color=color)
    safe_print(''.center(45, '='))
Example No. 15
    def __init__(self, config, logger):
        self.logger = logger
        self.config = config

        session = Session()
        credentials = session.get_credentials()
        current_credentials = credentials.get_frozen_credentials()

        self.cloudformation_client = boto3.client(
            service_name='cloudformation',
            aws_access_key_id=current_credentials.access_key,
            aws_secret_access_key=current_credentials.secret_key,
            aws_session_token=current_credentials.token,
            region_name=config.get('boto', 'AWS_DEFAULT_REGION'))
        self.ec2_client = boto3.client(
            service_name='ec2',
            aws_access_key_id=current_credentials.access_key,
            aws_secret_access_key=current_credentials.secret_key,
            aws_session_token=current_credentials.token,
            region_name=config.get('boto', 'AWS_DEFAULT_REGION'))
Example No. 16
    def _is_valid_session(self, session: boto3.Session):
        """Tests whether a cached session is valid or not."""
        log.debug(
            f"Checking session validity for session: {session.get_credentials().get_frozen_credentials().token}"
        )
        token = session.get_credentials().get_frozen_credentials().token
        if token in self._valid_tokens:
            log.debug(
                f"Session with token: {token} has validity: {self._valid_tokens[token].is_valid()}"
            )
            return self._valid_tokens[token].is_valid()
        else:
            try:
                log.info(f"Testing session with token: {token}")
                sts = session.client('sts')
                sts.get_caller_identity()
                self._valid_tokens[token] = SessionTokenCache(token=token)
                log.info("Adding session to cache, it's valid.")
                return True
            except ClientError:
                log.info("Session is invalid, returning false.")
                return False
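
The validity probe above boils down to calling STS GetCallerIdentity, which needs no IAM permissions. A minimal standalone sketch of just that check, without the token cache:

import boto3
from botocore.exceptions import ClientError

def session_is_valid(session: boto3.Session) -> bool:
    # GetCallerIdentity succeeds for any working set of credentials.
    try:
        session.client("sts").get_caller_identity()
        return True
    except ClientError:
        return False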
Example No. 17
def clientConfiguration():
    configDetails = {
        'keyID': os.environ['KEY_ID'],
        'key': os.environ['KEY'],
        'region': 'us-east-1',
        'endpoint': 'https://q2zhpsalonfsngxmhgvvucqoii.appsync-api.us-east-1.amazonaws.com/graphql',
    }

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }

    aws = AWSSession(aws_access_key_id=configDetails['keyID'],
                     aws_secret_access_key=configDetails['key'],
                     region_name=configDetails['region'])

    credentials = aws.get_credentials().get_frozen_credentials()

    auth = AWS4Auth(
        credentials.access_key,
        credentials.secret_key,
        aws.region_name,
        'appsync',
        session_token=credentials.token,
    )

    transport = RequestsHTTPTransport(url=configDetails['endpoint'],
                                      headers=headers,
                                      auth=auth)

    client = Client(transport=transport, fetch_schema_from_transport=True)

    return client
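
A hedged usage sketch for the client built above; the query and its listItems field are hypothetical and would need to match the actual AppSync schema:

from gql import gql

client = clientConfiguration()
result = client.execute(gql("query { listItems { items { id } } }"))
print(result)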
Example No. 18
def get_keys(iamuser):
    all_keys = []
    try:
        session = Session()
        credentials = session.get_credentials()
        credentials = credentials.get_frozen_credentials()
        print(f"\nAccess em uso: {credentials.access_key}")
        iam = client('iam')
        keys = iam.list_access_keys(UserName=iamuser)
    except Exception as e:
        print('Could not connect to AWS.')
        print('Error:', e)
        exit()
        
    for key in keys['AccessKeyMetadata']:
        user_key = {}
        
        if key['Status'] != 'Active':
            continue
            
        user_key['Id']=key['AccessKeyId']
        user_key['days'] = (datetime.now(key['CreateDate'].tzinfo) - key['CreateDate']).days
        
        new_access_key = None

        if int(user_key['days']) >= DAYS_THRESHOLD:
            print('\nHeads up! This key is overdue for rotation.\n'
                  f"\n\tAccess ID: {user_key['Id']} \t Age (days): {user_key['days']}")
            new_access_key = rotate_key(iamuser, str(key['AccessKeyId']))
            if new_access_key:
                user_key['Id'] = new_access_key
                user_key['days'] = 0

        all_keys.append(user_key)

    print('\nYour current access keys:')
    for i in all_keys:
        print('\tID:', i['Id'], '\tDays:', i['days'])
Example No. 19
    def __get_session_credentials(self):
        """
        Get session credentials
        """
        credentials = {}
        session = Session(region_name=self.region_name,
                          profile_name=self.profile_name)

        # if sts_arn is given, get credential by assuming given role
        if self.sts_arn:
            sts_client = session.client("sts", region_name=self.region_name)
            response = sts_client.assume_role(
                RoleArn=self.sts_arn,
                RoleSessionName=self.session_name,
                DurationSeconds=self.ttl,
            ).get("Credentials")

            credentials = {
                "access_key": response.get("AccessKeyId"),
                "secret_key": response.get("SecretAccessKey"),
                "token": response.get("SessionToken"),
                "expiry_time": response.get("Expiration").isoformat(),
            }
        else:
            session_credentials = session.get_credentials().__dict__
            credentials = {
                "access_key": session_credentials.get("access_key"),
                "secret_key": session_credentials.get("secret_key"),
                "token": session_credentials.get("token"),
                "expiry_time": datetime.fromtimestamp(time() + self.ttl).isoformat(),
            }

        return credentials
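
The dict produced above matches the metadata shape expected by botocore's RefreshableCredentials.create_from_metadata. A hedged sketch of how such a dict could back auto-refreshing credentials; the fetch_credentials stub and its placeholder values are assumptions standing in for the class method above:

from datetime import datetime, timedelta, timezone
from botocore.credentials import RefreshableCredentials

def fetch_credentials():
    # Stand-in for __get_session_credentials(); the values are placeholders.
    return {
        "access_key": "AKIA_PLACEHOLDER",
        "secret_key": "PLACEHOLDER",
        "token": "PLACEHOLDER",
        "expiry_time": (datetime.now(timezone.utc) + timedelta(hours=1)).isoformat(),
    }

refreshable = RefreshableCredentials.create_from_metadata(
    metadata=fetch_credentials(),
    refresh_using=fetch_credentials,
    method="sts-assume-role",
)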
Example No. 20
    def load(self) -> dict:

        s = get_docker_secret("node")

        if s is None:
            secrets = dict(os.environ)

            for x in self.config:
                secrets[x] = self.config[x]
        else:
            secrets = self.config.copy()

        try:
            session = Session()
            credentials = session.get_credentials()
            current_credentials = credentials.get_frozen_credentials()

            key = current_credentials.secret_key
            access = current_credentials.access_key

            secrets['AWS_ACCESS_KEY_ID'] = access
            secrets['AWS_SECRET_ACCESS_KEY'] = key
        except Exception:
            pass

        # configure default boto session here
        setup_default_session(
            aws_access_key_id=secrets.get('AWS_ACCESS_KEY_ID',
                                          'none_provided'),
            aws_secret_access_key=secrets.get('AWS_SECRET_ACCESS_KEY',
                                              'none provided'),
            region_name="us-west-2")

        self.fix_old_variable_names(secrets)

        return secrets
Example No. 21
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Using NAT Instances to connect Private instance to internet

import boto3

# Check if the user has the Access & Secret key configured
from boto3 import Session
session = Session()
credentials = session.get_credentials()
current_credentials = credentials.get_frozen_credentials()

# Break & Exit if any of the keys is not present
if current_credentials.access_key is None:
    print("Access Key missing, use `aws configure` to set up")
    exit()

if current_credentials.secret_key is None:
    print("Secret Key missing, use `aws configure` to set up")
    exit()

globalVars = {}
globalVars['REGION_NAME'] = "ap-south-1"
globalVars['AZ1'] = "ap-south-1a"
globalVars['AZ2'] = "ap-south-1b"
globalVars['CIDRange'] = "10.242.0.0/24"
globalVars['tagName'] = "miztiik-wp-demo-00"
globalVars['EC2-RH-AMI-ID'] = "ami-cdbdd7a2"
globalVars['EC2-Amazon-AMI-ID'] = "ami-3c89f653"
globalVars['EC2-InstanceType'] = "t2.micro"
Example No. 22
    def set_aws_credentials(self, session: boto3.Session) -> None:
        """ Sets the aws_credentials based on what's in the boto session """
        credentials = session.get_credentials()
        self.aws_credentials = credentials.get_frozen_credentials()
Example No. 23
class S3(object):
    """
    S3 wrapper class which utilizes the boto3 library to push files to an S3
    bucket.

    Parameters
    ----------
    profile : str, optional
        The name of the AWS profile to use, which is typically stored in the
        ``credentials`` file.  You can also set the environment variable
        ``AWS_DEFAULT_PROFILE``, which would be used instead.

    kms_key : str, optional
        The KMS key to use for encryption.
        If kms_key is left at its default of ``None``, AES256 ServerSideEncryption
        will be used.

    **kwargs
        Optional keyword arguments.

    Attributes
    ----------
    profile : str
        String representing the AWS profile for authentication

    kms_key : str
        String representing the s3 kms key

    session : boto3.Session
        Hold the AWS session credentials / info

    s3 : botocore.client.S3
        Hold the S3 client object which is used to upload/delete files to S3

    Raises
    ------
    S3Error
        Error initializing AWS Session (ex: invalid profile)

    S3CredentialsError
        Issue with AWS credentials

    S3InitializationError
        Issue initializing S3 session
    """
    def __init__(self, profile=None, kms_key=None, **kwargs):

        self.profile = profile
        self.kms_key = kms_key
        self.session = None
        self.s3 = None

        self._set_session()
        self._set_client()

    def _set_session(self):
        try:
            self.session = Session(profile_name=self.profile)
            logger.info("Initialized AWS session.")
        except Exception as e:
            logger.error("Error initializing AWS Session, err: %s", e)
            raise S3Error("Error initializing AWS Session.")
        credentials = self.session.get_credentials()
        if credentials is None:
            raise S3CredentialsError("Credentials could not be set.")

    def _set_client(self):
        try:
            self.s3 = self.session.client(
                "s3", config=Config(signature_version="s3v4"))
            logger.info("Successfully initialized S3 client.")
        except Exception as e:
            logger.error("Error initializing S3 Client, err: %s", e)
            raise S3InitializationError("Error initializing S3 Client.")

    def _credentials_string(self):
        """Returns a credentials string for the Redshift COPY or UNLOAD command,
        containing credentials from the current session.

        Returns
        -------
        str
            What to submit in the ``COPY`` or ``UNLOAD`` job under CREDENTIALS
        """
        creds = self.session.get_credentials()
        if creds.token is not None:
            temp = "aws_access_key_id={};aws_secret_access_key={};token={}"
            return temp.format(creds.access_key, creds.secret_key, creds.token)
        else:
            temp = "aws_access_key_id={};aws_secret_access_key={}"
            return temp.format(creds.access_key, creds.secret_key)

    def _generate_s3_path(self, bucket, key):
        """Will return the S3 file URL in the format S3://bucket/key

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        Returns
        -------
        str
            string of the S3 file URL in the format S3://bucket/key
        """
        return "s3://{0}/{1}".format(bucket, key)

    def _generate_unload_path(self, bucket, folder):
        """Will return the S3 file URL in the format s3://bucket/folder if a
        valid (not None) folder is provided. Otherwise, returns s3://bucket

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        folder : str
            The folder to unload files to. Note, if the folder does not end
            with a /, the file names will be prefixed with the folder arg.

        Returns
        -------
        str
            string of the S3 file URL in the format s3://bucket/folder
            If folder is None, returns format s3://bucket
        """
        if folder:
            s3_path = "s3://{0}/{1}".format(bucket, folder)
        else:
            s3_path = "s3://{0}".format(bucket)
        return s3_path

    def upload_to_s3(self, local, bucket, key):
        """
        Upload a file to a S3 bucket.

        Parameters
        ----------
        local : str
            The local file which you wish to copy.

        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        Raises
        ------
        S3UploadError
            If there is an issue uploading to the S3 bucket
        """
        extra_args = {}
        try:
            # force ServerSideEncryption
            if self.kms_key is None:
                extra_args["ServerSideEncryption"] = "AES256"
                logger.info("Using AES256 for encryption")
            else:
                extra_args["ServerSideEncryption"] = "aws:kms"
                extra_args["SSEKMSKeyId"] = self.kms_key
                logger.info("Using KMS Keys for encryption")

            logger.info("Uploading file to S3 bucket: %s",
                        self._generate_s3_path(bucket, key))
            self.s3.upload_file(local,
                                bucket,
                                key,
                                ExtraArgs=extra_args,
                                Callback=ProgressPercentage(local))
        except Exception as e:
            logger.error("Error uploading to S3. err: %s", e)
            raise S3UploadError("Error uploading to S3.")

    def upload_list_to_s3(self, local_list, bucket, folder=None):
        """
        Upload a list of files to a S3 bucket.

        Parameters
        ----------
        local_list : list
            List of strings with the file paths of the files to upload

        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        folder : str, optional
            The AWS S3 folder of the bucket which you are copying the local
            files to. Defaults to ``None``. Please note that you must follow the
            ``/`` convention when using subfolders.

        Returns
        -------
        list
            Returns a list of the generated S3 bucket and keys of the files which were uploaded. The
            ``S3://`` part is NOT included. The output would look like the following:
            ``["my-bucket/key1", "my-bucket/key2", ...]``

        Notes
        -----
        There is an assumption that if you are loading multiple files (via `splits`) it follows a
        structure such as `file_name.extension.#` (`#` splits). It allows for the `COPY` statement
        to use the key prefix vs specifying an exact file name. The returned list helps with this
        process downstream.
        """
        output = []
        for file in local_list:
            if folder is None:
                s3_key = os.path.basename(file)
            else:
                s3_key = "/".join([folder, os.path.basename(file)])
            self.upload_to_s3(file, bucket, s3_key)
            output.append("/".join([bucket, s3_key]))
        return output

    def download_from_s3(self, bucket, key, local):
        """
        Download a file from a S3 bucket.

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        local : str
            The local file which you wish to copy to.

        Raises
        ------
        S3DownloadError
            If there is an issue downloading from the S3 bucket
        """
        try:
            logger.info(
                "Downloading file from S3 bucket: %s",
                self._generate_s3_path(bucket, key),
            )
            config = TransferConfig(max_concurrency=5)
            self.s3.download_file(bucket, key, local, Config=config)
        except Exception as e:
            logger.error("Error downloading from S3. err: %s", e)
            raise S3DownloadError("Error downloading from S3.")

    def download_list_from_s3(self, s3_list, local_path=None):
        """
        Download a list of files from s3.

        Parameters
        ----------
        s3_list : list
            List of strings with the s3 paths of the files to download

        local_path : str, optional
            The local path where the files will be copied to. Defaults to the current working
            directory (``os.getcwd()``)

        Returns
        -------
        list
            Returns a list of strings of the local file names
        """
        if local_path is None:
            local_path = os.getcwd()

        output = []
        for f in s3_list:
            s3_bucket, key = self.parse_s3_url(f)
            local = os.path.join(local_path, os.path.basename(key))
            self.download_from_s3(s3_bucket, key, local)
            output.append(local)
        return output

    def delete_from_s3(self, bucket, key):
        """
        Delete a file from an S3 bucket.

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket from which you are deleting the file.

        key : str
            The name of the S3 object.

        Raises
        ------
        S3DeletionError
            If there is an issue deleting from the S3 bucket
        """
        try:
            logger.info("Deleting file from S3 bucket: %s",
                        self._generate_s3_path(bucket, key))
            self.s3.delete_object(Bucket=bucket, Key=key)
        except Exception as e:
            logger.error("Error deleting from S3. err: %s", e)
            raise S3DeletionError("Error deleting from S3.")

    def delete_list_from_s3(self, s3_list):
        """
        Delete a list of files from an S3 bucket.

        Parameters
        ----------
        s3_list : list
            List of strings with the s3 paths of the files to delete. The strings should not include
            the `s3://` scheme.
        """
        for file in s3_list:
            s3_bucket, s3_key = self.parse_s3_url(file)
            self.delete_from_s3(s3_bucket, s3_key)

    def parse_s3_url(self, s3_url):
        """
        Parse an S3 URL string to extract the bucket and key, whether or not
        it includes the ``s3://`` scheme.

        Parameters
        ----------
        s3_url : str
            s3 url. The string can include the `s3://` scheme (which is discarded)

        Returns
        -------
        bucket: str
            s3 bucket
        key: str
            s3 key
        """
        temp_s3 = s3_url.replace("s3://", "")
        s3_bucket = temp_s3.split("/")[0]
        s3_key = "/".join(temp_s3.split("/")[1:])
        return s3_bucket, s3_key
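
A hedged usage sketch for the wrapper class above; the profile, bucket, and file names are hypothetical:

s3 = S3(profile="my-profile")
s3.upload_to_s3("data.csv", "my-bucket", "exports/data.csv")
print(s3._generate_s3_path("my-bucket", "exports/data.csv"))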
Example No. 24
class S3(Cmd):
    """
    S3 wrapper class which utilizes the boto3 library to push files to an S3
    bucket.  Subclasses the Cmd class to inherit the Redshift connection.

    Parameters
    ----------
    profile : str, optional
        The name of the AWS profile to use, which is typically stored in the
        ``credentials`` file.  You can also set the environment variable
        ``AWS_DEFAULT_PROFILE``, which would be used instead.

    kms_key : str, optional
        The KMS key to use for encryption.
        If kms_key is left at its default of ``None``, AES256 ServerSideEncryption
        will be used.

    dbapi : DBAPI 2 module, optional
        A PostgreSQL database adapter which is Python DB API 2.0 compliant
        (``psycopg2``, ``pg8000``, etc.)

    host : str, optional
        Host name of the Redshift cluster to connect to.

    port : int, optional
        Port which connection will be made to Redshift.

    dbname : str, optional
        Redshift database name.

    user : str, optional
        Redshift users username.

    password : str, optional
        Redshift users password.

    config_yaml : str, optional
        String representing the file location of the credentials.

    s3_only : bool, optional
        If ``True``, do not initialize the underlying Redshift connection. This
        allows users who want to solely interact with S3 to use that functionality.


    Attributes
    ----------
    session : boto3.Session
        Holds the session & credentials information for the S3 connection.

    kms_key : str
        String representing the s3 kms key

    s3 : botocore.client.S3
        Hold the S3 client object which is used to upload/delete files to S3
    """
    def __init__(self,
                 profile=None,
                 kms_key=None,
                 dbapi=None,
                 host=None,
                 port=None,
                 dbname=None,
                 user=None,
                 password=None,
                 config_yaml=None,
                 s3_only=False,
                 **kwargs):

        self.profile = profile
        self.kms_key = kms_key
        self.session = None
        self.s3 = None

        if not s3_only:
            super(S3, self).__init__(dbapi, host, port, dbname, user, password,
                                     config_yaml)

        self._set_session()
        self._set_client()

    def _set_session(self):
        try:
            self.session = Session(profile_name=self.profile)
            logger.info('Successfully initialized AWS session.')
        except Exception as e:
            logger.error('Error initializing AWS Session, err: %s', e)
            raise S3Error('Error initializing AWS Session.')
        credentials = self.session.get_credentials()
        if credentials is None:
            raise S3CredentialsError('Credentials could not be set.')

    def _set_client(self):
        try:
            self.s3 = self.session.client(
                's3', config=Config(signature_version='s3v4'))
            logger.info('Successfully initialized S3 client.')
        except Exception as e:
            logger.error('Error initializing S3 Client, err: %s', e)
            raise S3InitializationError('Error initializing S3 Client.')

    def _credentials_string(self):
        """Returns a credentials string for the Redshift COPY or UNLOAD command,
        containing credentials from the current session.

        Returns
        -------
        str
            What to submit in the ``COPY`` or ``UNLOAD`` job under CREDENTIALS
        """
        creds = self.session.get_credentials()
        if creds.token is not None:
            temp = 'aws_access_key_id={};aws_secret_access_key={};token={}'
            return temp.format(creds.access_key, creds.secret_key, creds.token)
        else:
            temp = 'aws_access_key_id={};aws_secret_access_key={}'
            return temp.format(creds.access_key, creds.secret_key)

    def _generate_s3_path(self, bucket, key):
        """Will return the S3 file URL in the format S3://bucket/key

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        Returns
        -------
        str
            string of the S3 file URL in the format S3://bucket/key
        """
        return "s3://{0}/{1}".format(bucket, key)

    def _generate_unload_path(self, bucket, folder):
        """Will return the S3 file URL in the format s3://bucket/folder if a
        valid (not None) folder is provided. Otherwise, returns s3://bucket

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        folder : str
            The folder to unload files to. Note, if the folder does not end
            with a /, the file names will be prefixed with the folder arg.

        Returns
        -------
        str
            string of the S3 file URL in the format s3://bucket/folder
            If folder is None, returns format s3://bucket
        """
        if folder:
            s3_path = "s3://{0}/{1}".format(bucket, folder)
        else:
            s3_path = "s3://{0}".format(bucket)
        return s3_path

    def upload_to_s3(self, local, bucket, key):
        """
        Upload a file to a S3 bucket.

        Parameters
        ----------
        local : str
            The local file which you wish to copy.

        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        Raises
        ------
        S3UploadError
            If there is an issue uploading to the S3 bucket
        """
        extra_args = {}
        try:
            # force ServerSideEncryption
            if self.kms_key is None:
                extra_args['ServerSideEncryption'] = 'AES256'
                logger.info('Using AES256 for encryption')
            else:
                extra_args['ServerSideEncryption'] = 'aws:kms'
                extra_args['SSEKMSKeyId'] = self.kms_key
                logger.info('Using KMS Keys for encryption')

            logger.info('Uploading file to S3 bucket %s',
                        self._generate_s3_path(bucket, key))
            self.s3.upload_file(local,
                                bucket,
                                key,
                                ExtraArgs=extra_args,
                                Callback=ProgressPercentage(local))

        except Exception as e:
            logger.error('Error uploading to S3. err: %s', e)
            raise S3UploadError('Error uploading to S3.')

    def download_from_s3(self, bucket, key, local):
        """
        Download a file from a S3 bucket.

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket which you are copying the local file to.

        key : str
            The key to name the S3 object.

        local : str
            The local file which you wish to copy to.

        Raises
        ------
        S3DownloadError
            If there is an issue downloading from the S3 bucket
        """
        try:
            logger.info('Downloading file from S3 bucket %s',
                        self._generate_s3_path(bucket, key))
            config = TransferConfig(max_concurrency=5)
            self.s3.download_file(bucket, key, local, Config=config)
        except Exception as e:
            logger.error('Error downloading from S3. err: %s', e)
            raise S3DownloadError('Error downloading from S3.')

    def delete_from_s3(self, bucket, key):
        """
        Delete a file from an S3 bucket.

        Parameters
        ----------
        bucket : str
            The AWS S3 bucket from which you are deleting the file.

        key : str
            The name of the S3 object.

        Raises
        ------
        S3DeletionError
            If there is an issue deleting from the S3 bucket
        """

        try:
            logger.info('Deleting file from S3 bucket %s',
                        self._generate_s3_path(bucket, key))
            self.s3.delete_object(Bucket=bucket, Key=key)
        except Exception as e:
            logger.error('Error deleting from S3. err: %s', e)
            raise S3DeletionError('Error deleting from S3.')

    def _copy_to_redshift(self,
                          tablename,
                          s3path,
                          delim='|',
                          copy_options=None):
        """Executes the COPY command to load CSV files from S3 into
        a Redshift table.

        Parameters
        ----------
        tablename : str
            The Redshift table name which is being loaded

        s3path : str
            S3 path of the input file. eg: ``s3://path/to/file.csv``

        delim : str
            The delimiter in a delimited file.

        copy_options : list
            List of strings of copy options to provide to the ``COPY`` command.
            Will have default options added in.

        Raises
        ------
        Exception
            If there is a problem executing the COPY command, a connection
            has not been initialized, or credentials are wrong.
        """
        if not self._is_connected():
            raise RedshiftConnectionError(
                'No Redshift connection object is present.')

        copy_options = add_default_copy_options(copy_options)
        copy_options_text = combine_copy_options(copy_options)
        base_copy_string = ("COPY {0} FROM '{1}' "
                            "CREDENTIALS '{2}' "
                            "DELIMITER '{3}' {4};")
        try:
            sql = base_copy_string.format(tablename, s3path,
                                          self._credentials_string(), delim,
                                          copy_options_text)
            self.execute(sql, commit=True)

        except Exception as e:
            logger.error('Error running COPY on Redshift. err: %s', e)
            raise RedshiftError('Error running COPY on Redshift.')

    def run_copy(self,
                 local_file,
                 s3_bucket,
                 table_name,
                 delim="|",
                 copy_options=None,
                 delete_s3_after=False,
                 splits=1,
                 compress=True,
                 s3_folder=None):
        """Loads a file to S3, then copies into Redshift.  Has options to
        split a single file into multiple files, compress using gzip, and
        upload to an S3 bucket with folders within the bucket.

        Notes
        -----
        If you are using folders in your S3 bucket, please be aware that
        special characters or backslashes (``\\``) may cause the file to
        upload but fail on the ``COPY`` command.

        Parameters
        ----------
        local_file : str
            The local file which you wish to copy.

        s3_bucket : str
            The AWS S3 bucket which you are copying the local file to.

        table_name : str
            The Redshift table name which is being loaded

        delim : str, optional
            Delimiter for Redshift ``COPY`` command. Defaults to ``|``

        copy_options : list, optional
            A list (str) of copy options that should be appended to the COPY
            statement.  The class will insert a default for DATEFORMAT,
            COMPUPDATE and TRUNCATECOLUMNS if they are not provided in this
            list. See http://docs.aws.amazon.com/redshift/latest/dg/copy-parameters-data-conversion.html
            for options which could be passed.

        delete_s3_after : bool, optional
            Lets you specify to delete the S3 file after transfer if you want.

        splits : int, optional
            Number of splits to perform for parallel loading into Redshift.
            Must be greater than ``0``. Recommended that this number should be
            less than ``100``. Defaults to ``1``.

        compress : bool, optional
            Whether to compress the output file with ``gzip`` or leave it raw.
            Defaults to ``True``

        s3_folder : str, optional
            The AWS S3 folder of the bucket which you are copying the local
            file to. Defaults to ``None``. Please note that you must follow the
            ``/`` convention when using subfolders.
        """

        if copy_options is None:
            copy_options = []

        # generate the actual splitting of the files
        if splits > 1:
            upload_list = split_file(local_file, local_file, splits=splits)
        else:
            upload_list = [local_file]

        if compress:
            copy_options.append("GZIP")
            for i, f in enumerate(upload_list):
                gz = '{0}.gz'.format(f)
                compress_file(f, gz)
                upload_list[i] = gz
                os.remove(f)  # cleanup old files

        # copy file to S3
        for file in upload_list:
            if s3_folder is None:
                s3_key = os.path.basename(file)
            else:
                s3_key = '/'.join([s3_folder, os.path.basename(file)])

            self.upload_to_s3(file, s3_bucket, s3_key)

        # execute Redshift COPY
        self._copy_to_redshift(table_name,
                               self._generate_s3_path(
                                   s3_bucket,
                                   s3_key.split(os.extsep)[0]),
                               delim,
                               copy_options=copy_options)

        # delete file from S3 (if set to do so)
        if delete_s3_after:
            for file in upload_list:
                if s3_folder is None:
                    s3_key = os.path.basename(file)
                else:
                    s3_key = '/'.join([s3_folder, os.path.basename(file)])
                self.delete_from_s3(s3_bucket, s3_key)

    def run_unload(self,
                   query,
                   s3_bucket,
                   s3_folder=None,
                   export_path=False,
                   delimiter=',',
                   delete_s3_after=True,
                   parallel_off=False,
                   unload_options=None):
        """``UNLOAD`` data from Redshift, with options to write to a flat file,
        and store on S3.

        Parameters
        ----------
        query : str
            A query to be unloaded to S3. Typically a ``SELECT`` statement

        s3_bucket : str
            The AWS S3 bucket where the data from the query will be unloaded.

        s3_folder : str, optional
            The AWS S3 folder of the bucket where the data from the query will
            be unloaded. Defaults to ``None``. Please note that you must follow
            the ``/`` convention when using subfolders.

        export_path : str, optional
            If a ``export_path`` is provided, function will write the unloaded
            files to that folder.

        delimiter : str, optional
            Delimiter for unloading and file writing. Defaults to a comma.

        delete_s3_after : bool, optional
            Delete the files from S3 after unloading. Defaults to True.

        parallel_off : bool, optional
            Unload data to S3 as a single file. Defaults to False.
            Not recommended as it will decrease speed.

        unload_options : list, optional
            A list of unload options that should be appended to the UNLOAD
            statement.

        Raises
        ------
        Exception
            If no files are generated from the unload.
            If the column names from the query cannot be retrieved.
            If there is an issue with the execution of any of the queries.
        """
        # data = []
        s3path = self._generate_unload_path(s3_bucket, s3_folder)

        ## configure unload options
        if unload_options is None:
            unload_options = []

        unload_options.append("DELIMITER '{0}'".format(delimiter))
        if parallel_off:
            unload_options.append('PARALLEL OFF')

        ## run unload
        self._unload_to_s3(query=query,
                           s3path=s3path,
                           unload_options=unload_options)

        ## parse unloaded files
        files = self._unload_generated_files()
        if files is None:
            logger.error('No files generated from unload')
            raise Exception('No files generated from unload')

        columns = self._get_column_names(query)
        if columns is None:
            logger.error('Unable to retrieve column names from exported data')
            raise Exception(
                'Unable to retrieve column names from exported data.')

        # download files locally with same name
        # write columns to local file
        # write temp to local file
        # remove temp files
        if export_path:
            write_file([columns], delimiter, export_path)
            for f in files:
                key = urlparse(f).path[1:]
                local = os.path.basename(key)
                self.download_from_s3(s3_bucket, key, local)
                with open(local, 'rb') as temp_f:
                    with open(export_path, 'ab') as main_f:
                        for line in temp_f:
                            main_f.write(line)
                os.remove(local)

        ## delete unloaded files from s3
        if delete_s3_after:
            for f in files:
                key = urlparse(f).path[1:]
                self.delete_from_s3(s3_bucket, key)

    def _unload_to_s3(self, query, s3path, unload_options=None):
        """Executes the UNLOAD command to export a query from
        Redshift to S3.

        Parameters
        ----------
        query : str
            A query to be unloaded to S3.

        s3path : str
            S3 path for the output files.

        unload_options : list
            List of string unload options.

        Raises
        ------
        Exception
            If there is a problem executing the unload command.
        """
        if not self._is_connected():
            raise Exception("No Redshift connection object is present")

        unload_options = unload_options or []
        unload_options_text = ' '.join(unload_options)
        base_unload_string = ("UNLOAD ('{0}')\n"
                              "TO '{1}'\n"
                              "CREDENTIALS '{2}'\n"
                              "{3};")

        try:
            sql = base_unload_string.format(query.replace("'", r"\'"), s3path,
                                            self._credentials_string(),
                                            unload_options_text)
            self.execute(sql, commit=True)
        except Exception as e:
            logger.error('Error running UNLOAD on redshift. err: %s', e)
            raise

    def _get_column_names(self, query):
        """Gets a list of column names from the supplied query.

        Parameters
        ----------
        query : str
            A query (or table name) to be unloaded to S3.

        Returns
        -------
        list
            List of column names. Returns None if no columns were retrieved.
        """

        try:
            logger.info('Retrieving column names')
            sql = "SELECT * FROM ({}) WHERE 1 = 0".format(query)
            self.execute(sql)
            results = [desc for desc in self.cursor.description]
            if len(results) > 0:
                return [result[0].strip() for result in results]
            else:
                return None
        except Exception as e:
            logger.error('Error retrieving column names')
            raise

    def _unload_generated_files(self):
        """Gets a list of files generated by the unload process

        Returns
        -------
        list
            List of S3 file names
        """
        sql = ('SELECT path FROM stl_unload_log '
               'WHERE query = pg_last_query_id() ORDER BY path')
        try:
            logger.info('Getting list of unloaded files')
            self.execute(sql)
            results = self.cursor.fetchall()
            if len(results) > 0:
                return [result[0].strip() for result in results]
            else:
                return None
        except Exception as e:
            logger.error('Error retrieving unloads generated files')
            raise
Example No. 25
    def _handle_get_ec2_role(self):

        session = Session(region_name=self._region)
        credentials = session.get_credentials()
        return credentials
Example No. 26
class AWSContext(object):
    def __init__(self, context):
        self.__context = context
        self.__default_profile = None
        self.__session = None
        self.__session_without_role = None
        self.__args = None

    def initialize(self, args):
        self.__args = args

    def __init_session(self, region=None):
        # If region is provided then use it, otherwise defer to project region
        # If that's missing, boto3 will use the AWS CLI default region in the session
        region = self.region if region is None else region

        if self.__args.aws_access_key or self.__args.aws_secret_key:
            if self.__args.profile:
                raise HandledError(
                    'Either the --profile or the --aws-secret-key and --aws-access-key options can be specified.'
                )
            if not self.__args.aws_access_key or not self.__args.aws_secret_key:
                raise HandledError(
                    'Both --aws-secret-key and --aws-access-key are required if either one is given.'
                )

            self.__session = Session(
                aws_access_key_id=self.__args.aws_access_key,
                aws_secret_access_key=self.__args.aws_secret_key,
                region_name=region)
            self.__set_boto3_environment_variables(self.__args.aws_access_key,
                                                   self.__args.aws_secret_key)

        else:
            if self.__args.profile:
                if self.__args.aws_access_key or self.__args.aws_secret_key:
                    raise HandledError(
                        'Either the --profile or the --aws-secret-key and --aws-access-key options can be specified.'
                    )

            profile = self.__args.profile or self.__default_profile or 'default'

            if self.__args.verbose:
                self.__context.view.using_profile(profile)

            try:
                self.__session = Session(profile_name=profile,
                                         region_name=self.region)
            except ProfileNotFound as e:
                if self.has_credentials_file():
                    raise HandledError(
                        'The AWS session failed to locate AWS credentials for profile {}. Ensure that an AWS profile is present with command \'lmbr_aws list-profiles\' or using the Credentials Manager (AWS -> Credentials manager) in Lumberyard.  The AWS error message is \'{}\''
                        .format(profile, e))
                try:
                    # Try loading from environment
                    self.__session = Session(region_name=self.region)
                except ProfileNotFound as e:
                    raise HandledError(
                        'The AWS session failed to locate AWS credentials from the environment. Ensure that an AWS profile is present with command \'lmbr_aws list-profiles\' or using the Credentials Manager (AWS -> Credentials manager) in Lumberyard.  The AWS error message is \'{}\''
                        .format(e))

            credentials = self.__session.get_credentials()

            if not credentials:
                raise HandledError(
                    'The AWS session failed to locate AWS credentials for profile {}. Ensure that an AWS profile is present with command \'lmbr_aws list-profiles\' or using the Credentials Manager (AWS -> Credentials manager) in Lumberyard.'
                    .format(profile))

            credentials = credentials.get_frozen_credentials()
            self.__set_boto3_environment_variables(credentials.access_key,
                                                   credentials.secret_key)
        self.__add_cloud_canvas_attribution(self.__session)

    def assume_role(self, logical_role_id, deployment_name, region=None):
        duration_seconds = 3600  # TODO add an option for this? Revisit after adding GUI support.
        if region is None:
            region = self.region
        credentials = self.get_temporary_credentials(logical_role_id,
                                                     deployment_name,
                                                     duration_seconds, region)

        self.__session = Session(
            aws_access_key_id=credentials.get('AccessKeyId'),
            aws_secret_access_key=credentials.get('SecretAccessKey'),
            aws_session_token=credentials.get('SessionToken'),
            region_name=region)
        self.__set_boto3_environment_variables(
            credentials.get('AccessKeyId'), credentials.get('SecretAccessKey'),
            credentials.get('SessionToken'))

        self.__add_cloud_canvas_attribution(self.__session)

    def __set_boto3_environment_variables(self,
                                          access_key,
                                          secret_key,
                                          session_token=None):
        # set the environment variables that boto3 uses, in case some code initializes a session directly from the boto3 library
        os.environ["AWS_ACCESS_KEY_ID"] = access_key
        os.environ["AWS_SECRET_ACCESS_KEY"] = secret_key
        if session_token:
            os.environ["AWS_SESSION_TOKEN"] = session_token

    def __add_cloud_canvas_attribution(self, session):
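        # botocore appends user_agent_extra to the User-Agent header on every
        # request, so this tags all Cloud Canvas traffic for attribution.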
        if session._session.user_agent_extra is None:
            session._session.user_agent_extra = '/Cloud Canvas'
        else:
            session._session.user_agent_extra += '/Cloud Canvas'

    def __get_role_arn(self, role_logical_id, deployment_name):
        role_path = self.__context.config.get_project_stack_name()

        if self.__find_role_in_template(
                role_logical_id, self.__context.config.
                deployment_access_template_aggregator.effective_template):

            if deployment_name is None:
                deployment_name = self.__context.config.default_deployment

            if deployment_name is None:
                raise HandledError(
                    'The deployment access role {} was specified, but no deployment was given and there is no default deployment set for the project.'
                    .format(role_logical_id))

            stack_arn = self.__context.config.get_deployment_access_stack_id(
                deployment_name)

            role_path = role_path + '/' + deployment_name

        elif self.__find_role_in_template(
                role_logical_id, self.__context.config.
                project_template_aggregator.effective_template):

            stack_arn = self.__context.config.project_stack_id

            deployment_name = None

        else:
            raise HandledError(
                'Could not find role "{}" in the project or deployment access templates.'
                .format(role_logical_id))

        role_physical_id = self.__context.stack.get_physical_resource_id(
            stack_arn, role_logical_id)
        account_id = util.get_account_id_from_arn(stack_arn)
        role_arn = 'arn:aws:iam::{}:role/{}/{}'.format(account_id, role_path,
                                                       role_physical_id)

        self.__context.view.using_role(deployment_name, role_logical_id,
                                       role_physical_id)

        return role_arn

    def __find_role_in_template(self, logical_role_id, template):
        resources = template.get('Resources', {})
        definition = resources.get(logical_role_id, None)
        if definition:
            if definition.get('Type', '') == 'AWS::IAM::Role':
                return True
        return False

    def client(self,
               service_name,
               region=None,
               use_role=True,
               endpoint_url=None):
        if self.__session is None:
            self.__init_session(region)

        if use_role:
            session = self.__session
        else:
            session = self.__session_without_role

        client = session.client(service_name,
                                region_name=region,
                                config=Config(signature_version='s3v4'),
                                endpoint_url=endpoint_url)
        if service_name == 'cloudformation':
            wrapped_client = CloudFormationClientWrapper(
                client, self.__args.verbose)
        else:
            wrapped_client = ClientWrapper(client, self.__args.verbose)
        return wrapped_client

    def resource(self, service_name, region=None, use_role=True):
        if self.__session is None:
            self.__init_session()

        if use_role:
            session = self.__session
        else:
            session = self.__session_without_role

        return session.resource(service_name,
                                region_name=region,
                                config=Config(signature_version='s3v4'))

    @property
    def session(self):
        if self.__session is None:
            self.__init_session()
        return self.__session

    @property
    def region(self):
        if self.__session is not None:
            return self.__session.region_name
        else:
            return self.__context.config.project_region

    def set_default_profile(self, profile):
        self.__default_profile = profile
        self.__session = None

    def get_default_profile(self):
        return self.__default_profile

    def load_credentials(self):
        credentials = AwsCredentials()
        path = self.get_credentials_file_path()
        if os.path.isfile(path):
            self.__context.view.loading_file(path)
            credentials.read(path)
        return credentials

    def has_credentials_file(self):
        return os.path.isfile(self.get_credentials_file_path())

    def _get_sts_client(self, region):
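        # AWSSTSUtils presumably resolves the regional STS endpoint URL so the
        # client talks to STS in-region instead of using the global endpoint.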
        _endpoint_url = AWSSTSUtils(region).endpoint_url
        return self.client('sts', endpoint_url=_endpoint_url, region=region)

    def get_temporary_credentials(self, logical_role_id, deployment_name,
                                  duration_seconds, region):
        print("get_temporary_credentials REGION: {}".format(region))
        sts_client = self._get_sts_client(region=region)

        if logical_role_id:
            role_arn = self.__get_role_arn(logical_role_id, deployment_name)
            res = sts_client.assume_role(RoleArn=role_arn,
                                         RoleSessionName='lmbr_aws',
                                         DurationSeconds=duration_seconds)
            credentials = res.get('Credentials', {})
        else:
            res = sts_client.get_session_token(
                DurationSeconds=duration_seconds)
            credentials = res.get('Credentials', {})

        return credentials

    def save_credentials(self, credentials):
        path = self.get_credentials_file_path()
        dir = os.path.dirname(path)
        if not os.path.isdir(dir):
            os.makedirs(dir)
        self.__context.view.saving_file(path)
        credentials.write(path)

    @staticmethod
    def get_credentials_file_path():
        return os.path.join(os.path.expanduser('~'), '.aws', 'credentials')

    def profile_exists(self, profile):
        credentials = self.load_credentials()
        return credentials.has_section(profile)
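As a reference for the assume_role flow above, here is a condensed sketch of the same STS pattern using plain boto3; the role ARN, session name, and region are placeholders, not values taken from the example:

from boto3 import Session

def session_for_role(role_arn, region, duration_seconds=3600):
    # Assume the role with STS, then build a new session from the
    # temporary credentials it returns.
    sts = Session(region_name=region).client('sts')
    response = sts.assume_role(RoleArn=role_arn,
                               RoleSessionName='example-session',
                               DurationSeconds=duration_seconds)
    credentials = response['Credentials']
    return Session(aws_access_key_id=credentials['AccessKeyId'],
                   aws_secret_access_key=credentials['SecretAccessKey'],
                   aws_session_token=credentials['SessionToken'],
                   region_name=region)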
def _print_relevant_environment_vars(environ):
    session = Session()
    print("Session current region={}".format(session.region_name))
    print("Session available_profiles={}".format(session.available_profiles))
    print("Session access_key={}".format(session.get_credentials().access_key))
    print("Session secret_key={}".format(session.get_credentials().secret_key))
Ejemplo n.º 29
0
def main():
    parser = argparse.ArgumentParser(description='Enumerate IAM permissions')

    parser.add_argument(
        '--profile',
        help=
        'AWS profile name fetched from credentials file. Specify this parameter or access-key and secret-key manually.'
    )
    parser.add_argument('--access-key',
                        help='AWS access key if profile was not used')
    parser.add_argument('--secret-key',
                        help='AWS secret key if profile was not used')
    parser.add_argument('--session-token', help='STS session token')
    parser.add_argument('--region',
                        help='AWS region to send API requests to',
                        default='us-east-1')
    parser.add_argument(
        '--output',
        help=
        'File to write output JSON containing all of the collected permissions'
    )
    parser.add_argument(
        '--timeout',
        help=
        'Timeout in minutes for permissions brute-forcing activity. Def: 15.',
        type=int,
        default=15)
    #parser.add_argument('--verbose', action='store_true', help='Enable verbose output.')
    parser.add_argument('--debug',
                        action='store_true',
                        help='Enable debug output.')

    args = parser.parse_args()

    if args.profile and (args.access_key or args.secret_key
                         or args.session_token):
        sys.stderr.write(
            'error: Profile and raw AWS credential options are mutually exclusive.\n'
        )
        sys.stderr.write(
            '       Please specify either --profile or --access-key and --secret-key.\n\n'
        )
        parser.print_help()
        sys.exit(2)

    access_key = args.access_key
    secret_key = args.secret_key
    session_token = args.session_token

    if args.profile:
        session = Session(profile_name=args.profile)
        credentials = session.get_credentials()
        currcreds = credentials.get_frozen_credentials()
        access_key = currcreds.access_key
        secret_key = currcreds.secret_key
        session_token = currcreds.token

    level = logging.INFO
    if args.debug:
        level = logging.DEBUG

    output = enumerate_iam(access_key, secret_key, session_token, args.region,
                           args.timeout * 60, level)

    if args.output:
        with open(args.output, 'w') as f:
            f.write(json.dumps(output, indent=4, default=json_encoder))
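Before handing the resolved keys to enumerate_iam, it can be useful to confirm which principal they belong to. A small sketch, assuming a working profile named 'default' exists; the profile name and region are illustrative and not arguments from main() above:

from boto3 import Session

session = Session(profile_name='default', region_name='us-east-1')
# get_caller_identity works with any valid credentials and requires no IAM permissions.
identity = session.client('sts').get_caller_identity()
print('Enumerating as:', identity['Arn'])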
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Check if the user has the Access & Secret key configured
import boto3
import time
from boto3 import Session
session = Session()
credentials = session.get_credentials()
current_credentials = credentials.get_frozen_credentials()
# Break & Exit if any of the key is not present
if current_credentials.access_key is None:
    print("Access Key missing, use  `aws configure` to setup")
    exit()
if current_credentials.secret_key is None:
    print("Secret Key missing, use  `aws configure` to setup")
    exit()
# VPC design for multi az deployments
globalVars = {}
globalVars['REGION_NAME'] = "ap-south-1"
globalVars['AZ1'] = "ap-south-1a"
globalVars['AZ2'] = "ap-south-1b"
globalVars['CIDRange'] = "10.240.0.0/23"
globalVars['az1_pvtsubnet_CIDRange'] = "10.240.0.0/25"
globalVars['az1_pubsubnet_CIDRange'] = "10.240.0.128/26"
globalVars['az1_sparesubnet_CIDRange'] = "10.240.0.192/26"
globalVars['az2_pvtsubnet_CIDRange'] = "10.240.1.0/25"
globalVars['az2_pubsubnet_CIDRange'] = "10.240.1.128/26"
globalVars['az2_sparesubnet_CIDRange'] = "10.240.1.192/26"
globalVars['Project'] = {'Key': 'Name', 'Value': 'WordPress-Demo'}
globalVars['tags'] = [{