def aws_verbose():
    """
    Boto3 only provides usable information in DEBUG mode.
    Using an empty logger name catches debug output from both boto3 and botocore.
    TODO: Open a ticket in the boto3/botocore project to provide more information at other logging levels.
    """
    boto3.set_stream_logger(name="")
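
A minimal usage sketch (the S3 client and the list_buckets call are illustrative): calling aws_verbose() before an API call attaches a DEBUG-level stream handler to the root logger via the empty name, so output from both boto3 and botocore becomes visible.

import logging
import boto3

aws_verbose()      # equivalent to boto3.set_stream_logger(name="", level=logging.DEBUG)
s3 = boto3.client("s3")
s3.list_buckets()  # credential lookup, request signing and HTTP details now appear at DEBUG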
Example #2
def main(args):
    project = get_project(args.datafile)

    region = os.getenv("AWS_DEFAULT_REGION", args.region)
    kw = {}
    if not os.getenv("AWS_PROFILE"):
        kw = dict(
            aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
            aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
            aws_session_token=os.getenv('AWS_SESSION_TOKEN'),
        )

    if args.debug:
        boto3.set_stream_logger(name='botocore')
        trollius_log(level=logging.DEBUG)

    session = boto3.session.Session(region_name=region, **kw)
    storage = S3(
        session=session,
        vault=args.vault,
        vaultkey=args.vaultkey,
        env=args.env,
        region=args.region,
        prefix=args.project,
        project=project,
    )

    method = getattr(storage, args.action)
    result = yield from method(**vars(args))
    prettyprint(result, args)
 def __init__(self,
              bucket,
              key,
              local_path,
              logger,
              aws_access_key_id,
              aws_secret_access_key,
              storage_class,
              part_size=int(15e6),
              region_name="us-east-1",
              metadata={},
              verbose=False):
     self.bucket = bucket
     self.key = key
     self.path = local_path
     self.logger = logger
     self.total_bytes = os.stat(local_path).st_size
     self.part_bytes = part_size
     self.region = region_name
     self.metadata = metadata
     self.storage_class = storage_class
     #assert part_size > self.PART_MINIMUM
     #assert (self.total_bytes % part_size == 0
     #        or self.total_bytes % part_size > self.PART_MINIMUM)
     self.s3 = boto3.session.Session(
         region_name=region_name,
         aws_access_key_id=aws_access_key_id,
         aws_secret_access_key=aws_secret_access_key).client("s3")
     if verbose:
         boto3.set_stream_logger(name="boto")
     else:
         boto3.set_stream_logger('boto', logging.WARNING)
  def aws_fips_ec2_addr_hosting(self, ):
    
    boto3.set_stream_logger(name="botocore")
    host_description_data = dict()

    try:
      _boto3_session = boto3.session.Session(profile_name="betaDev", region_name="us-east-1")
      _boto3_client = _boto3_session.client(service_name="ec2", endpoint_url="https://ec2-fips.us-east-1.amazonaws.com", config=self.ec2_config)

      _describe_region_service = _boto3_client.describe_regions()
      _describe_addresses_service = _boto3_client.describe_addresses(DryRun=False)
      _describe_image_service = _boto3_client.describe_images(DryRun=False)
      
      host_description_data["describe_region_service"] = _describe_region_service
      host_description_data["describe_addresses_service"] = _describe_region_service
      host_description_data["describe_image_service"] = _describe_image_service

    except Exception as error:
      logger.exception("Logging exceptionized error: " + "'"+ str(error) + "'")
      sys.exit(1)

    else:
      return host_description_data
def load_log_config():
    """
    Configure a custom log formatter.
    """
    # basic configuration
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    formatter = CustomLogFormatter(
        '[%(levelname)s]\t%(asctime)s.%(msecs)dZ\t%(levelno)s\t%(message)s\n',
        '%Y-%m-%dT%H:%M:%S')

    if logger.hasHandlers():
        logger.debug("using default lambda log handler")
        log_handler = logger.handlers[0]
        log_handler.setFormatter(formatter)
    else:
        logger.debug("creating a new handler")

        # initialize the handler
        log_handler = logging.StreamHandler(sys.stdout)
        log_handler.setFormatter(formatter)
        logger.addHandler(log_handler)

    ## set log level
    logger.setLevel(logging.DEBUG)

    ## keep boto3/botocore at WARNING
    boto3.set_stream_logger('botocore', logging.WARNING)
    boto3.set_stream_logger('boto3', logging.WARNING)

    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('s3transfer').setLevel(logging.WARNING)

    return logger
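
A short usage sketch (the handler and return value are hypothetical): in an AWS Lambda function, load_log_config() reuses the handler the runtime pre-installs, applies the custom formatter, and keeps the boto3/botocore/urllib3/s3transfer loggers at WARNING.

def lambda_handler(event, context):
    logger = load_log_config()
    logger.info("received event")             # rendered by CustomLogFormatter
    logger.debug("event payload: %s", event)  # emitted because the root level is DEBUG
    return {"status": "ok"}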
Example #6
def main():
	conn = None

	device_opt = ["port", "no_password", "region", "access_key", "secret_key", "boto3_debug"]

	atexit.register(atexit_handler)

	define_new_opts()

	all_opt["power_timeout"]["default"] = "60"

	options = check_input(device_opt, process_input(device_opt))

	docs = {}
	docs["shortdesc"] = "Fence agent for AWS (Amazon Web Services)"
	docs["longdesc"] = "fence_aws is an I/O Fencing agent for AWS (Amazon Web\
Services). It uses the boto3 library to connect to AWS.\
\n.P\n\
boto3 can be configured with AWS CLI or by creating ~/.aws/credentials.\n\
For instructions see: https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration"
	docs["vendorurl"] = "http://www.amazon.com"
	show_docs(options, docs)

	run_delay(options)

	if options.get("--verbose") is not None:
		lh = logging.FileHandler('/var/log/fence_aws_debug.log')
		logger.addHandler(lh)
		lhf = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
		lh.setFormatter(lhf)
		logger.setLevel(logging.DEBUG)
	
	if options["--boto3_debug"].lower() not in ["1", "yes", "on", "true"]:
		boto3.set_stream_logger('boto3',logging.INFO)
		boto3.set_stream_logger('botocore',logging.CRITICAL)
		logging.getLogger('botocore').propagate = False
		logging.getLogger('boto3').propagate = False
	else:
		log_format = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
		logging.getLogger('botocore').propagate = False
		logging.getLogger('boto3').propagate = False
		fdh = logging.FileHandler('/var/log/fence_aws_boto3.log')
		fdh.setFormatter(log_format)
		logging.getLogger('boto3').addHandler(fdh)
		logging.getLogger('botocore').addHandler(fdh)
		logging.debug("Boto debug level is %s and sending debug info to /var/log/fence_aws_boto3.log", options["--boto3_debug"])

	region = options.get("--region")
	access_key = options.get("--access-key")
	secret_key = options.get("--secret-key")
	try:
		conn = boto3.resource('ec2', region_name=region,
				      aws_access_key_id=access_key,
				      aws_secret_access_key=secret_key)
	except Exception as e:
		fail_usage("Failed: Unable to connect to AWS: " + str(e))

	# Operate the fencing device
	result = fence_action(conn, options, set_power_status, get_power_status, get_nodes_list)
	sys.exit(result)
Example #7
def download(access_key, secret_key, data_home, verbose):
    """Download tokenization corpus from cloud with your key."""

    data_home = Path(os.path.expanduser(data_home))
    data_home.mkdir(parents=True, exist_ok=True)
    console.print(
        f"Data directory for customer review classification data {data_home}")

    if verbose:
        boto3.set_stream_logger("boto3", logging.DEBUG)
        boto3.set_stream_logger("botocore", logging.DEBUG)

    transport_params = {
        'session':
        boto3.Session(aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key),
        'resource_kwargs': {
            'endpoint_url': 'https://storage.googleapis.com',
        }
    }

    url = f"s3://sadedegel/dataset/customer_review_classification.zip"

    with open(url, 'rb', transport_params=transport_params) as fp:
        with ZipFile(fp) as zp:
            zp.extractall(data_home)
Example #8
def setup_logging(logger_level):
    the_logger = logging.getLogger()
    for old_handler in the_logger.handlers:
        the_logger.removeHandler(old_handler)

    new_handler = logging.StreamHandler(sys.stdout)

    hostname = socket.gethostname()

    json_format = (
        '{ "timestamp": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", '
        f'"environment": "{args.environment}", "application": "{args.application}", '
        f'"module": "%(module)s", "process": "%(process)s", '
        f'"thread": "[%(thread)s]", "hostname": "{hostname}" }} ')

    new_handler.setFormatter(logging.Formatter(json_format))
    the_logger.addHandler(new_handler)
    new_level = logging.getLevelName(logger_level.upper())
    the_logger.setLevel(new_level)

    if the_logger.isEnabledFor(logging.DEBUG):
        boto3.set_stream_logger()
        the_logger.debug(f'Using boto3", "version": "{boto3.__version__}')

    return the_logger
def setup_logging(logger_level):
    """Set the default logger with json output."""
    the_logger = logging.getLogger()
    for old_handler in the_logger.handlers:
        the_logger.removeHandler(old_handler)

    new_handler = logging.StreamHandler(sys.stdout)
    hostname = socket.gethostname()

    json_format = (
        f'{{ "timestamp": "%(asctime)s", "log_level": "%(levelname)s", "message": "%(message)s", '
        f'"module": "%(module)s", "process":"%(process)s", '
        f'"thread": "[%(thread)s]", "host": "{hostname}" }}')

    new_handler.setFormatter(logging.Formatter(json_format))
    the_logger.addHandler(new_handler)
    new_level = logging.getLevelName(logger_level)
    the_logger.setLevel(new_level)

    if the_logger.isEnabledFor(logging.DEBUG):
        # Log everything from boto3
        boto3.set_stream_logger()
        the_logger.debug(f'Using boto3", "version": "{boto3.__version__}')

    return the_logger
Example #10
 def enable_connection_debug(self, level=DEBUG, format_string=None):
     try:
         level = Eulogger.format_log_level(level, 'DEBUG')
         set_stream_logger('botocore', level=level, format_string=format_string)
     except:
         self.log.error('Could not enable debug for: "{0}"'.format(self))
         raise
def put_from_manifest(
        s3_bucket, s3_connection_host, s3_ssenc, s3_base_path,
        aws_access_key_id, aws_secret_access_key, manifest,
        bufsize, compress_data, concurrency=None, incremental_backups=False):
    """
    Uploads files listed in a manifest to Amazon S3.
    To support files larger than 5 GB, multipart upload is used (chunks of 60 MB).
    Files are uploaded compressed with lzop, and the .lzo suffix is appended.
    """
    bucket = get_bucket(
        s3_bucket, aws_access_key_id,
        aws_secret_access_key, s3_connection_host)

    # Create a boto3 session
    session = Session(aws_access_key_id=aws_access_key_id,
                      aws_secret_access_key=aws_secret_access_key,
                      region_name='us-east-1')
    client = session.client('s3')
    event_system = client.meta.events
    config = TransferConfig(
        multipart_threshold = MULTI_PART_UPLOAD_THRESHOLD,
        max_concurrency=4)
    transfer = S3Transfer(client, config)
    boto3.set_stream_logger('botocore', logging.INFO)

    with open(manifest, 'r') as manifest_fp:
        files = manifest_fp.read().splitlines()
    for f in files:
        file_path = s3_base_path + f
        print("boto3, upload file {0} to {1}: {2}".format(f, s3_bucket, file_path))
        transfer.upload_file(f, s3_bucket, file_path)
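
The docstring above mentions 60 MB chunks, but the TransferConfig in the example only sets the multipart threshold; a sketch of a config that also makes the part size explicit (MULTI_PART_UPLOAD_THRESHOLD is assumed from the surrounding module):

config = TransferConfig(
    multipart_threshold=MULTI_PART_UPLOAD_THRESHOLD,  # switch to multipart above this size
    multipart_chunksize=60 * 1024 * 1024,             # upload in 60 MB parts
    max_concurrency=4)
transfer = S3Transfer(client, config)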
Example #12
def sync(config, parameter, debug, confirm, yes):
    confirm = confirm or yes
    if debug:
        LOGGER.setLevel(logging.DEBUG)
        boto3.set_stream_logger(name='boto3', level=logging.DEBUG)
        boto3.set_stream_logger(name='botocore', level=logging.DEBUG)
    else:
        LOGGER.setLevel(logging.INFO)

    if not confirm:
        check_update_available()
        click.confirm('This action will modify AWS infrastructure in account: {0}\nAre you sure?'.format(
            get_first_account_alias()), abort=True)

    try:

        config = Config(config_file=config, cli_params=parameter)
        StackActionHandler(config).create_or_update_stacks()
    except CfnSphereException as e:
        LOGGER.error(e)
        if debug:
            LOGGER.exception(e)
        sys.exit(1)
    except Exception as e:
        LOGGER.error("Failed with unexpected error")
        LOGGER.exception(e)
        LOGGER.info("Please report at https://github.com/cfn-sphere/cfn-sphere/issues!")
        sys.exit(1)
Example #13
def sync(config, parameter, suffix, debug, confirm, yes):
    confirm = confirm or yes
    if debug:
        LOGGER.setLevel(logging.DEBUG)
        boto3.set_stream_logger(name='boto3', level=logging.DEBUG)
        boto3.set_stream_logger(name='botocore', level=logging.DEBUG)
    else:
        LOGGER.setLevel(logging.INFO)

    if not confirm:
        check_update_available()
        click.confirm(
            'This action will modify AWS infrastructure in account: {0}\nAre you sure?'
            .format(get_first_account_alias_or_account_id()),
            abort=True)

    try:

        config = Config(config_file=config,
                        cli_params=parameter,
                        stack_name_suffix=suffix)
        StackActionHandler(config).create_or_update_stacks()
    except CfnSphereException as e:
        LOGGER.error(e)
        if debug:
            LOGGER.exception(e)
        sys.exit(1)
    except Exception as e:
        LOGGER.error("Failed with unexpected error")
        LOGGER.exception(e)
        LOGGER.info(
            "Please report at https://github.com/cfn-sphere/cfn-sphere/issues!"
        )
        sys.exit(1)
Example #14
    def __init__(self):
        """Initialize the object with data from the command line or environment
        variables. Log in into DCOS if username / password are provided.
        Set up logging according to the verbosity requested.
        """
        self.app_instances = 0
        self.trigger_var = 0
        self.cool_down = 0
        self.dcos_headers = {}

        self.parse_arguments()

        # Start logging
        if self.verbose:
            level = logging.DEBUG
        else:
            level = logging.INFO

        logging.basicConfig(level=level, format=self.LOGGING_FORMAT)

        # Override the boto logging level to something less chatty
        boto3.set_stream_logger(name='botocore',
                                level=logging.ERROR,
                                format_string=self.LOGGING_FORMAT)

        self.log = logging.getLogger("marathon-autoscaler")

        # Set auth header
        self.authenticate()
def __setup(local_download_dir_warc, log_level):
    """
    Setup
    :return:
    """
    os.makedirs(local_download_dir_warc, exist_ok=True)

    global __log_pathname_fully_extracted_warcs
    __log_pathname_fully_extracted_warcs = os.path.join(
        local_download_dir_warc, 'fullyextractedwarcs.list')

    # make loggers quiet
    configure_logging({"LOG_LEVEL": "ERROR"})
    logging.getLogger('requests').setLevel(logging.CRITICAL)
    logging.getLogger('readability').setLevel(logging.CRITICAL)
    logging.getLogger('PIL').setLevel(logging.CRITICAL)
    logging.getLogger('newspaper').setLevel(logging.CRITICAL)
    logging.getLogger('newsplease').setLevel(logging.CRITICAL)
    logging.getLogger('urllib3').setLevel(logging.CRITICAL)
    logging.getLogger('jieba').setLevel(logging.CRITICAL)

    boto3.set_stream_logger('botocore', log_level)
    boto3.set_stream_logger('boto3', log_level)

    # set own logger
    logging.basicConfig(level=log_level)
    __logger = logging.getLogger(__name__)
    __logger.setLevel(log_level)
Example #16
    def __init__(
        self, bucket, key, local_path, username, directory,
            part_size=int(15e6),
            profile_name=None, region_name="eu-west-1", verbose=False):

        if directory is None:
            path = '{}/{}'.format(username, key)
        else:
            path = '{}/{}/{}'.format(username, directory, key)
        self.bucket = bucket
        self.key = path
        self.path = local_path
        self.total_bytes = os.stat(local_path).st_size
        self.part_bytes = part_size
        self.config = TransferConfig(
            multipart_threshold=1024 * 25, max_concurrency=10,
            multipart_chunksize=1024 * 25, use_threads=True)
        assert part_size > self.PART_MINIMUM
        assert (self.total_bytes % part_size == 0
                or self.total_bytes % part_size > self.PART_MINIMUM)
        self.s3 = boto3.client(
            's3',
            aws_config.REGION,
            config=Config(s3={'addressing_style': 'path'}),
            aws_access_key_id=aws_config.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=aws_config.AWS_ACCESS_KEY,
        )
        if verbose:
            boto3.set_stream_logger(name="botocore")
Example #17
    def __init__(
        self, key, secret, endpoint=None, zone=None, max_retries=None, trace=False
    ):
        self.zone = _DEFAULT_ZONE if zone is None else zone
        endpoint = (
            "https://sos-{}.exo.io".format(self.zone) if endpoint is None else endpoint
        )
        max_retries = 3 if max_retries is None else max_retries
        super().__init__(
            endpoint=endpoint,
            key=key,
            secret=secret,
            max_retries=max_retries,
            trace=trace,
        )

        if self.zone is None:
            raise ValueError("no storage zone specified")

        self.boto = boto3.client(
            "s3",
            region_name=self.zone,
            endpoint_url=self.endpoint,
            aws_access_key_id=key,
            aws_secret_access_key=secret,
            config=botocore.client.Config(
                user_agent="{} boto3/{} botocore/{}".format(
                    self.user_agent, boto3.__version__, botocore.__version__
                ),
                retries={"max_attempts": self.max_retries},
            ),
        )

        if trace:
            boto3.set_stream_logger(name="", level=logging.DEBUG)
Example #18
 def enable_connection_debug(self, level=DEBUG, format_string=None):
     try:
         level = Eulogger.format_log_level(level, 'DEBUG')
         set_stream_logger('botocore', level=level, format_string=format_string)
     except:
         self.log.error('Could not enable debug for: "{0}"'.format(self))
         raise
Example #19
def emr():
    """EMR mock service"""
    # TODO: implement fixture after moto is ready
    # https://github.com/spulec/moto/pull/456
    boto3.set_stream_logger()
    mock = mock_emr()
    mock.start()

    client = boto3.client('emr')
    clusters = []
    for i in range(2):
        cluster = client.run_job_flow(
            Name='cluster{}'.format(i),
            Instances={
                'MasterInstanceType': 'c3.xlarge',
                'SlaveInstanceType': 'c3.xlarge',
                'InstanceCount': 3,
                'Placement': {
                    'AvailabilityZone': 'ap-northeast-1a'
                },
                'KeepJobFlowAliveWhenNoSteps': True,
            },
            VisibleToAllUsers=True,
        )
        clusters.append(cluster)
    yield {'clusters': clusters}
    mock.stop()
Example #20
def emr():
    """EMR mock service"""
    # TODO: implement fixture after moto is ready
    # https://github.com/spulec/moto/pull/456
    boto3.set_stream_logger()
    mock = mock_emr()
    mock.start()

    client = boto3.client('emr')
    clusters = []
    for i in range(2):
        cluster = client.run_job_flow(
            Name='cluster{}'.format(i),
            Instances={
                'MasterInstanceType': 'c3.xlarge',
                'SlaveInstanceType': 'c3.xlarge',
                'InstanceCount': 3,
                'Placement': {'AvailabilityZone': 'ap-northeast-1a'},
                'KeepJobFlowAliveWhenNoSteps': True,
            },
            VisibleToAllUsers=True,
        )
        clusters.append(cluster)
    yield {'clusters': clusters}
    mock.stop()
Example #21
def main(args):

    boto3.set_stream_logger(name='botocore')

    dynamodb = boto3.Session(
        aws_access_key_id=g_aws_access_key_id,
        aws_secret_access_key=g_aws_secret_access_key,
        region_name=g_region_name).resource('dynamodb')

    table = dynamodb.Table('GreengrassDashboard-IoTGSDynamoDeviceStatusTable-1JGCAR33OAYSP')

    devices = {"hopper", "knuth", "turing"}
    while True:
        for device in devices:
            json_data = {}
            json_data["deviceId"] = device
            json_data["sensorReading"] = random.randrange(30, 41, 1)
            json_data["batteryCharge"] = random.randrange(-10, 21, 1)
            json_data["batteryDischargeRate"] = random.randrange(0, 6, 1)
            #json_data = json.dumps(json_data)
            print(json_data)

            table.put_item(Item=json_data)
            break
        break
        time.sleep(1)
    def _refreshAuth(self, refreshToken, cognitoUUID):
        client = boto3.client('cognito-idp',
                              aws_access_key_id=self._conf.ACCESSKEYID,
                              aws_secret_access_key=self._conf.SECRETACCESSKEY)

        hashVal = secretHash(cognitoUUID + self._conf.CLIENT_ID,
                             self._conf.CLIENT_SECRET)

        try:
            boto3.set_stream_logger('botocore', level='DEBUG')
            resp = client.admin_initiate_auth(
                UserPoolId=self._conf.USER_POOL_ID,
                ClientId=self._conf.CLIENT_ID,
                AuthFlow='REFRESH_TOKEN_AUTH',
                AuthParameters={
                    'REFRESH_TOKEN': refreshToken,
                    'SECRET_HASH': hashVal
                })
        except client.exceptions.NotAuthorizedException as e:
            return None, e.__str__()
        except client.exceptions.UserNotConfirmedException:
            return None, 'User is not confirmed'
        except Exception as e:
            return None, e.__str__()
        return resp, None
    def __init__(self,
                 s3_conn_id,
                 s3_bucket,
                 s3_bucket_no_acesss_point,
                 s3_key,
                 redshift_conn_id,
                 redshift_schema,
                 table,
                 copy_params=[],
                 origin_schema=None,
                 schema_location='s3',
                 load_type='append',
                 primary_key=None,
                 incremental_key=None,
                 foreign_key={},
                 distkey=None,
                 sortkey='',
                 sort_type='COMPOUND',
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.s3_conn_id = s3_conn_id
        self.s3_bucket = s3_bucket
        self.s3_bucket_no_acesss_point = s3_bucket_no_acesss_point
        self.s3_key = s3_key
        self.redshift_conn_id = redshift_conn_id
        self.redshift_schema = redshift_schema.lower()
        self.table = table.lower()
        self.copy_params = copy_params
        self.origin_schema = origin_schema
        self.schema_location = schema_location
        self.load_type = load_type
        self.primary_key = primary_key
        self.incremental_key = incremental_key
        self.foreign_key = foreign_key
        self.distkey = distkey
        self.sortkey = sortkey
        self.sort_type = sort_type

        import boto3
        boto3.set_stream_logger('boto3.resources', logging.DEBUG)

        if self.load_type.lower() not in ("append", "rebuild", "truncate", "upsert"):
            raise Exception('Please choose "append", "rebuild", or "upsert".')

        if self.schema_location.lower() not in ('s3', 'local'):
            raise Exception('Valid Schema Locations are "s3" or "local".')

        if not (isinstance(self.sortkey, str) or isinstance(self.sortkey, list)):
            raise Exception('Sort Keys must be specified as either a string or list.')

        if not (isinstance(self.foreign_key, dict) or isinstance(self.foreign_key, list)):
            raise Exception('Foreign Keys must be specified as either a dictionary or a list of dictionaries.')

        if self.distkey and ((',' in self.distkey) or not isinstance(self.distkey, str)):
            raise Exception('Only one distribution key may be specified.')

        if self.sort_type.lower() not in ('compound', 'interleaved'):
            raise Exception('Please choose "compound" or "interleaved" for sort type.')
Example #24
 def __init__(self):
     self.session = boto3.session.Session()
     self.bucket = conf.AWS_BUCKET_NAME
     self.client = self.session.client(
         service_name='s3',
         aws_access_key_id=conf.AWS_ACCESS_KEY_ID,
         aws_secret_access_key=conf.AWS_SECRET_ACCESS_KEY)
     boto3.set_stream_logger('boto3.resources', logging.INFO)
Example #25
 def disable_connection_debug(self, level=NOTSET):
     try:
         self.connection.debug = 0
         level = Eulogger.format_log_level(level, 'NOTSET')
         set_stream_logger('botocore', level=level, format_string=format_string)
     except:
         self.log.error('Could not disable debug for: "{0}"'.format(self))
         raise
Example #26
 def session(self):
     '''Get a low-level session object or create one if needed'''
     if not self._session:
         if self.config.debug_mode:
             boto3.set_stream_logger(level=log.DEBUG)
         self._session = boto3.session.Session(
             region_name=self.region_name, **self.session_cfg)
     return self._session
Example #27
 def disable_connection_debug(self, level=NOTSET):
     try:
         self.connection.debug = 0
         level = Eulogger.format_log_level(level, 'NOTSET')
         set_stream_logger('botocore', level=level, format_string=format_string)
     except:
         self.log.error('Could not disable debug for: "{0}"'.format(self))
         raise
Example #28
def process_global_options(arg_dict):
    """ Take all of the global options and apply them

    Note: This can be run more than once, but the last used value will always win
    """
    config.aws_profile = arg_dict['--profile']
    _init_logging(arg_dict['--log'])
    boto3.set_stream_logger(name='botocore', level=arg_dict['--log-boto'])
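
A usage sketch with a docopt-style argument dictionary (the values are illustrative); the '--log-boto' entry is handed straight to boto3.set_stream_logger as the botocore level:

process_global_options({
    '--profile': 'default',   # stored on config.aws_profile
    '--log': 'INFO',          # application log level, consumed by _init_logging
    '--log-boto': 'WARNING',  # level for the botocore stream logger
})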
 def get(self, path=""):
     """
     Takes a path and returns lists of files/objects
     and directories/prefixes based on the path.
     """
     boto3.set_stream_logger("boto3.resources", logging.DEBUG)
     boto3.set_stream_logger("botocore", logging.DEBUG)
     result = get_from_path(path)
     self.finish(json.dumps(result))
Example #30
 def __init__(self, region, retries=100):
     boto3.set_stream_logger('boto3', logging.INFO)
     self.session = boto3.Session()
     self.region = region
     self.retries = retries
     self._ec2_client = None
     self._ec2_resource = None
     self._cloudwatch_client = None
     self._iam_client = None
def complex_experiment_obj(sagemaker_boto_client):
    description = "{}-{}".format("description", str(uuid.uuid4()))
    boto3.set_stream_logger("", logging.INFO)

    # create experiment
    experiment_obj_name = name()
    experiment_obj = experiment.Experiment.create(
        experiment_name=experiment_obj_name, description=description, sagemaker_boto_client=sagemaker_boto_client
    )

    # create trials
    trial_objs = []
    trial_name1 = name()
    trial_name2 = name()
    trial_name3 = name()

    next_trial1 = trial.Trial.create(
        trial_name=trial_name1, experiment_name=experiment_obj_name, sagemaker_boto_client=sagemaker_boto_client,
    )
    trial_objs.append(next_trial1)
    next_trial2 = trial.Trial.create(
        trial_name=trial_name2, experiment_name=experiment_obj_name, sagemaker_boto_client=sagemaker_boto_client,
    )
    trial_objs.append(next_trial2)
    next_trial3 = trial.Trial.create(
        trial_name=trial_name3, experiment_name=experiment_obj_name, sagemaker_boto_client=sagemaker_boto_client,
    )
    trial_objs.append(next_trial3)

    # create trial components
    trial_component_name = name()
    trial_component_obj = trial_component.TrialComponent.create(
        trial_component_name=trial_component_name, sagemaker_boto_client=sagemaker_boto_client,
    )

    # associate the trials with trial components
    for trial_obj in trial_objs:
        sagemaker_boto_client.associate_trial_component(
            TrialName=trial_obj.trial_name, TrialComponentName=trial_component_obj.trial_component_name
        )
    time.sleep(1.0)
    yield experiment_obj
    experiment_obj.delete_all(action="--force")

    # load experiment and verify experiment got deleted
    with pytest.raises(sagemaker_boto_client.exceptions.ResourceNotFound):
        sagemaker_boto_client.describe_experiment(ExperimentName=experiment_obj_name)
    # load trials and verify trials got deleted
    with pytest.raises(sagemaker_boto_client.exceptions.ResourceNotFound):
        sagemaker_boto_client.describe_trial(TrialName=trial_name1)
    with pytest.raises(sagemaker_boto_client.exceptions.ResourceNotFound):
        sagemaker_boto_client.describe_trial(TrialName=trial_name2)
    with pytest.raises(sagemaker_boto_client.exceptions.ResourceNotFound):
        sagemaker_boto_client.describe_trial(TrialName=trial_name3)
    # load trial component and verify trial component got deleted
    with pytest.raises(sagemaker_boto_client.exceptions.ResourceNotFound):
        sagemaker_boto_client.describe_trial_component(TrialComponentName=trial_component_name)
 def put_object(self, body, key):
     boto3.set_stream_logger(name="botocore")
     self.logger.info(f"Uploading index for '{key}'.")
     self.s3_client.put_object(
         Body=body,
         Bucket=self.bucket_name,
         CacheControl="max-age=0",
         ContentType="text/html",
         Key=key,
     )
Example #33
def experiment_obj(sagemaker_boto_client):
    description = '{}-{}'.format('description', str(uuid.uuid4()))
    boto3.set_stream_logger('', logging.INFO)
    experiment_obj = experiment.Experiment.create(
        experiment_name=name(),
        description=description,
        sagemaker_boto_client=sagemaker_boto_client)
    yield experiment_obj
    time.sleep(0.5)
    experiment_obj.delete()
 def __init__(self):
     boto3.set_stream_logger("boto3", logging.WARN)
     logging.basicConfig(
         level=logging.INFO,
         format='%(asctime)s %(name)s %(levelname)s - %(message)s')
     self.logger = logging.getLogger(__name__)
     if os.getenv('AWS_ACCESS_KEY_ID') is None:
         raise AwsConfigException('AWS_ACCESS_KEY_ID not defined')
     if os.getenv('AWS_SECRET_ACCESS_KEY') is None:
         raise AwsConfigException('AWS_SECRET_ACCESS_KEY not defined')
Example #35
 def _setup_logger(self, eng_args_config):
     modules_logging_level = EeArg(
         eng_args_config.get('boto3_logging_level')).value
     if modules_logging_level:
         for module, level in modules_logging_level.items():
             logging_level = constants.LOG_LEVELS.get(
                 'info' if not level else level.lower(), logging.INFO)
             self._logger.info("Set logging level, '{}' ==> {}".format(
                 module, logging.getLevelName(logging_level)))
             boto3.set_stream_logger(module, logging_level)
def get_public_ip_address(instanceId):
    """When passed a tag key, tag value this will return a list of InstanceIds that were found."""
    ec2client = boto3.client('ec2')
    boto3.set_stream_logger('boto3.resources', logging.INFO)
    response = ec2client.describe_instances(InstanceIds=[instanceId])
    for reservation in (response["Reservations"]):
        for instance in reservation["Instances"]:
            public_ip_address = (instance["PublicIpAddress"])
            state_status = instance['State']['Name']
    return public_ip_address, state_status
def setup_logging(verbose=False):
    '''
    Setup logging

    :param verbose: bool - Enable verbose debug mode
    '''

    ch = logging.StreamHandler()
    ch.setFormatter(logging.Formatter('[%(asctime)s] %(message)s'))
    logger.addHandler(ch)
    logger.setLevel(logging.INFO)
    if verbose:
        logger.setLevel(logging.DEBUG)
        boto3.set_stream_logger('boto3.resources', logging.DEBUG)
Example #38
def main():

    parser = argparse.ArgumentParser(
        prog='athena',
        usage='athena [--debug] [--execute <execute>] [--output-format <output-format>] [--schema <schema>] [--version]'
              ' [--region <region>] [--s3-bucket <bucket>]',
        description='Athena interactive console'
    )
    parser.add_argument(
        '--debug',
        action='store_true'
    )
    parser.add_argument(
        '--execute'
    )
    parser.add_argument(
        '--output-format'
    )
    parser.add_argument(
        '--schema',
        '--database',
        '--db'
    )
    parser.add_argument(
        '--version',
        action='store_true'
    )
    parser.add_argument(
        '--region',
        default=AWS_REGION
    )
    parser.add_argument(
        '--s3-bucket',
        '--bucket',
        dest='bucket',
        default=S3_RESULTS_BUCKET
    )
    args = parser.parse_args()

    if args.debug:
        boto3.set_stream_logger(name='botocore')

    if args.version:
        print('Athena CLI %s' % __version__)
        sys.exit()

    shell = AthenaShell(args.region, args.bucket, args.schema, args.debug)
    shell.cmdloop_with_cancel()
Example #39
def main(args):
    project = get_project(args.datafile)
    
    region = os.getenv("AWS_DEFAULT_REGION", args.region)
    kw = {}
    if not os.getenv("AWS_PROFILE"):
        kw = dict(aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
            aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
            aws_session_token=os.getenv('AWS_SESSION_TOKEN'),)

    if args.debug:
        boto3.set_stream_logger(name='botocore')
        trollius_log(level=logging.DEBUG)

    session = boto3.session.Session(region_name=region, **kw)
    storage = S3(session=session, vault=args.vault, vaultkey=args.vaultkey, env=args.env, project=project)

    method = getattr(storage, args.action)
    result = yield From(method(**vars(args)))
    prettyprint(result)
Example #40
def set_boto_logger_level(level='NOTSET', format_string=None):
    """
    Set the global boto logger levels to 'level', e.g. "DEBUG", "INFO", "CRITICAL".
    The default is "NOTSET".
    :param level: string matching a logging level name, or the equivalent integer value
    :param format_string: logging formatter string
    """
    level = Eulogger.format_log_level(level, 'NOTSET')
    set_stream_logger('boto', level=level, format_string=format_string)
    set_stream_logger('boto3', level=level, format_string=format_string)
    set_stream_logger('botocore', level=level, format_string=format_string)
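
Illustrative calls (assuming Eulogger accepts these level names): raise every boto-related logger to DEBUG while investigating an issue, then drop back to the quiet default.

set_boto_logger_level('DEBUG')   # boto, boto3 and botocore all become verbose
# ... run the AWS calls being debugged ...
set_boto_logger_level('NOTSET')  # restore the default, effectively quiet, level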
Example #41
import logging
import datetime
import base64
import json
import argparse
import sys

# Import third party libs
import requests
import boto3

# shut up requests module
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)

# shut up boto3 and botocore
boto3.set_stream_logger(level=logging.WARNING)
logging.getLogger('botocore').setLevel(logging.WARNING)


def get_service(url, from_context, to_context, auth_key, token_lifetime=1):
    '''
    Read secret data from Confidant via its API.
    '''
    # Return a dict, always with an attribute that specifies whether or not the
    # function was able to successfully get a result.
    ret = {'result': False}
    # Populate the auth encryption context dict that'll be used for KMS.
    auth_context = {
        'from': from_context,
        'to': to_context
    }
Example #42
        print( "504" )
        sys.exit( 4 )

#except:
#    myLog( "error", "No support for docType: " + docType )
#    print( "505" )
#    sys.exit( 5 )

#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

#try: # Upload to s3
if True:
    s3 = boto3.client( 's3', AWS_REGION,
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY )
    boto3.set_stream_logger('botocore', level='DEBUG')
    myLog( "info", "About to upload to bucket: " + door43Bucket + " from: " + outDir )
    os.chdir( outDir )
    src = dest # like "bspidel/gaj-x-ymnk_obs_text_obs/d2bc0dcb/html"
    outPath = bucket + dest
    tpl = 1
    myLog( "info", "cwd: " + os.getcwd() + " src: " + src )

    for root, dirs, files in os.walk( src ):
        myLog( "detail", "root: " + root )
        c = 0

        for nme in files:
            srcPath =  join( root, nme )
            myLog( "loops", "srcPath: " + srcPath +  "  Bucket: " + door43Bucket )  
            s3.upload_file( srcPath, bucket, "u/" + srcPath )
##########################################################################
""" S3 Clean Up

CloudFormation Custom Resource Lambda Function
"""

import logging
import boto3
import cfnresponse

DEFAULT_LOGGING_LEVEL = logging.INFO
logging.basicConfig(format='[%(levelname)s] %(message)s', level=DEFAULT_LOGGING_LEVEL)
logger = logging.getLogger(__name__)
logger.setLevel(DEFAULT_LOGGING_LEVEL)

boto3.set_stream_logger('boto3', level=DEFAULT_LOGGING_LEVEL)

s3_client = boto3.client('s3')
s3_resource = boto3.resource('s3')

def get_buckets_from_properties(resource_properties):
    buckets = resource_properties.get('Buckets')
    logger.info('buckets in properties {}'.format(buckets))
    if type(buckets) != list:
        raise ValueError('invalid Buckets property type - not an array')
    if not len(buckets):
        raise ValueError('empty Buckets property')
    for bucket in buckets:
        bucket_type = type(bucket)
        if not (bucket_type == str or bucket_type == unicode):
            raise ValueError(
Example #44
	def get_resource(self):
		boto3.set_stream_logger('botocore', logging.WARNING)
		boto3.set_stream_logger('boto3.resources', logging.WARNING)
		return boto3.resource('dynamodb')
#!/usr/bin/env python3

import botocore.exceptions
import boto3
import logging
import os

boto3.set_stream_logger(level=logging.INFO)

Constants = {
    # Overview stuff
    'ImageId': 'ami-dfcab0b5',
    'InstanceType': 'r3.xlarge',
    'DatabaseVolumeGb': '500',
    'SearchindexVolumeGb': '300',

    # VPC and subnet where our instances go
    'VpcName': 'overview',
    'VpcCidr': '10.0.0.0/16',
    'SubnetName': 'overview',
    'SubnetCidr': '10.0.0.0/24',
    'SubnetAvailabilityZone': 'us-east-1a',

    # DNS stuff on Route 53
    'HostedZoneId': 'Z372XVIFF1FHQW',
    'DnsNames': {
        'production': [ 'www.overviewdocs.com', 'overviewdocs.com' ],
        'staging': [ 'staging.overviewdocs.com', 'staging-redirect.overviewdocs.com' ],
    },

    # ELB requires at least two availability zones, so we'll create a second one
Example #46
import os
import boto3

from xdfile.utils import log, info, debug, error


boto3.set_stream_logger('botocore')

def xd_send_email(destaddr, fromaddr='*****@*****.**', subject='', body=''):
    client = boto3.client('ses', region_name=os.environ['REGION'])
    info("sending email to %s (subject '%s')" % (destaddr, subject))
    try:
        response = client.send_email(
                Source=fromaddr,
                Destination= {'ToAddresses': [ destaddr ] },
                Message={ 'Subject': { 'Data': subject },
                'Body': { 'Text': { 'Data': body } } })
        return response
    except Exception as e:
        error("xd_send_email(): %s" % str(e))
        return None


def create_merge_request():
    import urllib.request
    import urllib.parse
    parms = {
        'id': '',
        'source_branch': '',
        'target_branch': '',
        'title': '',
Example #47
def main():
    """The main runner for our script."""

    parser = get_args()
    args = parser.parse_args()

    if args.verbose:
        LOG.setLevel(logging.INFO)
        LOG.info('Verbose: on')
    else:
        ## If not verbose, turn down boto3.
        boto3.set_stream_logger(name='boto3', level=logging.WARNING)
        boto3.set_stream_logger(name='botocore', level=logging.WARNING)
        logging.getLogger("requests").setLevel(logging.WARNING)

    ## Ensure credentials.
    if not args.credentials:
        die_screaming('need a credentials argument')
    LOG.info('Will use credentials: ' + args.credentials)
    ## Ensure directory.
    if not args.directory:
        die_screaming('need a directory argument')
    args.directory = args.directory.rstrip('//')
    LOG.info('Will operate in: ' + args.directory)
    ## Ensure bucket.
    if not args.bucket:
        die_screaming('need a bucket argument')
    bucket, slash, toppath = args.bucket.partition('/')
    if toppath != '':
        LOG.info('Will put to bucket: ' + bucket + '; with path: ' + toppath)
    else:
        LOG.info('Will put to bucket at top level: ' + bucket)
    ## Ensure mimetype metadata.
    if not args.mimetypes:
        LOG.info('Will use internal mimetype defaults')
    else:
        LOG.info('TODO: Will get mimetype metadata from: ' + args.metadata)
    ## Ensure bucket location.
    if not args.location:
        args.location = 'us-east-1'
        LOG.info('Will use S3 bucket location default: ' + args.location)
    else:
        LOG.info('Will use S3 bucket location: ' + args.location)

    ## Extract S3 credentials.
    creds = None
    with open(args.credentials) as chandle:
        creds = json.loads(chandle.read())
    #LOG.info(creds)

    s3 = boto3.resource('s3', region_name=args.location,
                          aws_access_key_id=creds['accessKeyId'],
                          aws_secret_access_key=creds['secretAccessKey'])

    # s3 = boto3.resource("s3", creds['accessKeyId'], creds['secretAccessKey'])

    #s3.Object('mybucket', 'hello.txt').put(Body=open('/tmp/hello.txt', 'rb'))

    ## Walk tree.
    for curr_dir, dirs, files in os.walk(args.directory):

        ## We can navigate up if we are not in the root.
        relative_to_start = curr_dir.rstrip('//')[len(args.directory):]
        relative_to_start = relative_to_start.lstrip('//')
        LOG.info('curr_dir: ' + curr_dir + ' (' + relative_to_start + ')')

        ## Note files and directories.
        for fname in files:

            ## Get correct mime type.
            fext = os.path.splitext(fname)[1].lstrip('.')
            mime = MIMES.get('') # start with default
            if MIMES.get(fext, False):
                mime = MIMES.get(fext)

            ## Figure out S3 path/key and final filename, keeping in
            ## mind that relative_to_Start can be empty if root.
            s3path = fname
            if relative_to_start:
                s3path = relative_to_start + '/' + fname
            filename = os.path.join(curr_dir, fname)

            tags = {}
            if args.number:
                tags['build-number'] = args.number
            if args.pipeline:
                tags['build-pipeline'] = args.pipeline
            tags_str = urllib.parse.urlencode(tags)

            ## Visual check.
            LOG.info('file: ' + filename)
            if toppath != '':
                s3path = toppath + '/' + s3path
            LOG.info(' -> [' + bucket + '] ' + s3path + \
                      '(' + mime + ', ' + tags_str + ')')

            ## Create the new object that we want.
            s3bucket = s3.Bucket(bucket)
            multipart_upload(filename, s3bucket, s3path, content_type=mime, metadata=tags, policy="public-read")
Example #48
from os.path import join
from . import utils, config, project, decorators # BE SUPER CAREFUL OF CIRCULAR DEPENDENCIES
from .decorators import testme
from .utils import ensure, first, lookup, lmap, lfilter, unique, isstr
import boto3
import botocore
from contextlib import contextmanager
from fabric.api import settings, execute, env, parallel, serial, hide, run, sudo
from fabric.exceptions import NetworkError
from fabric.state import output
from slugify import slugify
import logging
from kids.cache import cache as cached

LOG = logging.getLogger(__name__)
boto3.set_stream_logger(name='botocore', level=logging.INFO)

class DeprecationException(Exception):
    pass

class NoMasterException(Exception):
    pass


ALL_CFN_STATUS = [
    'CREATE_IN_PROGRESS',
    'CREATE_FAILED',
    'CREATE_COMPLETE',
    'ROLLBACK_IN_PROGRESS',
    'ROLLBACK_FAILED',
    'ROLLBACK_COMPLETE',
Example #49
    if args.debug:
        loglvl = logging.DEBUG
        bloglvl = logging.ERROR

    if args.debug_boto:
        loglvl = logging.DEBUG
        bloglvl = logging.INFO

    logger.setLevel(loglvl)

    conslog = logging.StreamHandler()
    conslog.setFormatter(format)
    conslog.setLevel(loglvl)
    logger.addHandler(conslog)
    boto3.set_stream_logger('boto3.resources', bloglvl)

    #FIXME: need to figure out where to store template
    template = open('/usr/share/grabcloud/template.json').read()
    logger.debug( "read template from cloudformation/template.json")

    if args.action == 'configure':
        logger.debug( "starting configuration process" )
        configure_app()

    if args.action == 'start':
        logger.debug( "starting instance" )

        c = get_config()
        start(
            stackname = stackname,
Example #50
import boto3
# import botocore
import unittest
import logging
import time
import os
import requests
import tempfile
import ssl
from functools import partial
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from AWSIoTPythonSDK.exception.AWSIoTExceptions import connectTimeoutException


boto3.set_stream_logger('botocore', logging.WARN)
log = logging.getLogger('cloudcam.test')
log.setLevel(logging.DEBUG)

iotcore = logging.getLogger("AWSIoTPythonSDK.core")
iotcore.setLevel(logging.DEBUG)


class Client():
    def __init__(self, iot_client=None, iot_data_client=None, credentials=None, ca_path=None, privkey_path=None, cert_path=None):
        assert ca_path, "Certificate is required"
        if not iot_client:
            iot_client = boto3.client('iot')
        if not iot_data_client:
            iot_data_client = boto3.client('iot-data')
        self.iot_client = iot_client
        self.iot_data_client = iot_data_client
Example #51
import boto3
import logging
import redis

from flask import Flask
from flask.ext.session import Session
from flask_sslify import SSLify
from confidant import lru
from confidant import settings

if not settings.get('DEBUG'):
    boto3.set_stream_logger(level=logging.CRITICAL)
    logging.getLogger('botocore').setLevel(logging.CRITICAL)
    logging.getLogger('pynamodb').setLevel(logging.WARNING)
log = logging.getLogger(__name__)

static_folder = settings.get('STATIC_FOLDER')

app = Flask(__name__, static_folder=static_folder)
app.config.from_object(settings)
app.debug = app.config['DEBUG']

if app.config['SSLIFY']:
    sslify = SSLify(app, skips=['healthcheck'])

cache = lru.LRUCache(2048)

if app.config.get('REDIS_URL'):
    app.config['SESSION_REDIS'] = redis.Redis.from_url(
        app.config['REDIS_URL']
    )
Example #52
	def setUp(self):
		boto3.set_stream_logger('botocore', logging.WARNING)
		dynamodb = boto3.resource('dynamodb')
		self.config_table = dynamodb.Table('config')
Example #53
import logging

import boto3

FMT = '[%(levelname)s:%(name)s]:(%(asctime)s):%(message)s'
FMT = '%(asctime)s [%(levelname)s] %(name)s : %(message)s'

logging.basicConfig(
    filename='stream.log', format=FMT, level=logging.INFO)

boto3.set_stream_logger(name='boto3', level=logging.WARNING, format_string=FMT)

# Note: if level is set to logging.DEBUG, boto3 prints full text of every putItem.
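
A small sketch of the trade-off described in the note above (the table name and item are hypothetical): switching the boto3 logger to DEBUG makes the full parameters of every put_item call appear in the log output.

dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('example-table')  # hypothetical table

boto3.set_stream_logger(name='boto3', level=logging.DEBUG, format_string=FMT)
table.put_item(Item={'id': '42', 'payload': 'example'})  # the serialized item is now logged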
 def setUp(self):
     boto3.set_stream_logger('botocore', logging.WARNING)
     dynamodb = boto3.resource('dynamodb')
     self.result_table = dynamodb.Table('scan_result')
     self.results_repository = src.ResultsRepository(self.result_table)
Example #55
#!/usr/bin/env python3

import logging
import boto3

boto3.set_stream_logger('boto3.resources', logging.INFO)

ec2 = boto3.client('ec2')

# create a volume
ec2.create_volume(Size=1,AvailabilityZone='us-east-1b')

# iterate through existing volumes
ebs = boto3.resource('ec2')
vols = ebs.volumes.all()
for vol in vols:
    print(vol.id)

# attach a volume to an instance
instances = ebs.instances.all()
for instance in instances:
    for tag in instance.tags:
        if tag['Key'] == 'Name' and tag['Value'] == 'tomtestEC2':
            instance.attach_volume(VolumeId='vol-053c19f10d771b340', Device='/dev/xvdh')

# create a snapshot
ec2.create_snapshot(VolumeId='vol-053c19f10d771b340')

# detach a volume
for vol in vols:
    if vol.id == 'vol-053c19f10d771b340':