def configure(binder: Binder) -> Binder:
    """Register the Elasticsearch room index with the injector binder.

    Connection parameters are read from the ELASTICSEARCH_HOST and
    ELASTICSEARCH_PORT environment variables. Returns the binder so
    callers can chain further configuration.
    """
    factory = ElasticSearchFactory(
        os.environ['ELASTICSEARCH_HOST'],
        os.environ['ELASTICSEARCH_PORT'],
    )
    room_index = ElasticSearchIndex(factory, 'rooms', 'room', room_mapping)
    binder.bind(ElasticSearchIndex, room_index)
    return binder
def configure(binder: Binder) -> Binder:
    """Register DI bindings: a stub ItemsProvider and the Elasticsearch room index.

    Elasticsearch connection parameters come from the ELASTICSEARCH_HOST and
    ELASTICSEARCH_PORT environment variables. Returns the binder for chaining.

    BUG FIX: the original passed TWO interface/instance pairs to a single
    ``binder.bind()`` call (``ItemsProvider, ItemsProvider(...),
    ElasticSearchIndex, ElasticSearchIndex(...)``). ``bind`` takes one
    interface and one implementation, so the ElasticSearchIndex pair was
    passed as unexpected extra arguments and was never registered. Each
    binding now gets its own ``bind()`` call, matching the sibling
    ``configure`` implementations in this file.
    """
    binder.bind(ItemsProvider, ItemsProvider([{"Name": "Test1"}]))
    binder.bind(
        ElasticSearchIndex,
        ElasticSearchIndex(
            ElasticSearchFactory(
                os.environ['ELASTICSEARCH_HOST'],
                os.environ['ELASTICSEARCH_PORT'],
            ),
            'rooms',
            'room',
            room_mapping))
    return binder
def configure(binder: Binder) -> Binder:
    """Wire the application services into the injector binder.

    Loads environment variables from a .env file (if one is found), then
    registers Firestore, Washer, and an ElasticSearchIndex covering the
    vehicle-related indices. Returns the binder for chaining.
    """
    load_dotenv(find_dotenv())

    firestore = Firestore(
        os.getenv('AUTOSLANCES_CREDENTIALS_PATH'),
        os.getenv('AUTOSLANCES_DATABASE_NAME'),
    )
    binder.bind(Firestore, firestore)
    binder.bind(Washer)

    index_mappings = {
        'vehicles': vehicle_mapping,
        'brands': brand_mapping,
        'models': model_mapping,
        'versions': version_mapping,
    }
    binder.bind(ElasticSearchIndex, ElasticSearchIndex(ElasticSearchFactory(), index_mappings))
    return binder
# Script prelude: builds AWS clients and an Elasticsearch index writer for
# recording per-bucket S3 storage-class sizes.
import pprint
import datetime

from services.elasticsearch import ElasticSearchIndex, ElasticSearchFactory
from conf.elasticsearch_mapper import daily_bucket_storageclass_size_mapping

# boto3.set_stream_logger('', 10)

# NOTE(review): boto3 is used below but no `import boto3` is visible in this
# chunk — presumably imported earlier in the file; confirm.
sts_client = boto3.client('sts')
s3 = boto3.resource('s3')
cloudwatch = boto3.resource('cloudwatch')

pp = pprint.PrettyPrinter(indent=4)

# Index writer targeting a hard-coded local Elasticsearch instance
# (index 'daily_bucket_storageclass_sizes', doc type
# 'daily_bucket_storageclass_size').
indexer = ElasticSearchIndex(
    ElasticSearchFactory(
        'localhost',
        9200,
    ),
    'daily_bucket_storageclass_sizes',
    'daily_bucket_storageclass_size',
    daily_bucket_storageclass_size_mapping)

# Pairs of CloudWatch StorageType metric dimension and the corresponding S3
# storage-class name. (The list literal continues beyond this chunk.)
storageTypes = [{
    'StorageType': 'StandardStorage',
    'StorageClass': 'STANDARD'
}, {
    'StorageType': 'IntelligentTieringStorage',
    'StorageClass': 'INTELLIGENT_TIERING'
}, {
    'StorageType': 'StandardIAStorage',
    'StorageClass': 'STANDARD_IA'
}, {
    'StorageType': 'OneZoneIAStorage',
    'StorageClass': 'ONEZONE_IA'
    # (fragment begins inside a boto3.resource(...) call opened above this chunk)
    's3',
    aws_access_key_id=credentials['AccessKeyId'],
    aws_secret_access_key=credentials['SecretAccessKey'],
    aws_session_token=credentials['SessionToken'],
)
# CloudWatch resource built with the same temporary STS credentials as s3.
cloudwatch = boto3.resource(
    'cloudwatch',
    aws_access_key_id=credentials['AccessKeyId'],
    aws_secret_access_key=credentials['SecretAccessKey'],
    aws_session_token=credentials['SessionToken'],
)
pp = pprint.PrettyPrinter(indent=4)
# Index writer; ELASTICSEARCH_HOST / ELASTICSEARCH_PORT are presumably
# module-level names defined earlier in this file — confirm.
indexer = ElasticSearchIndex(
    ElasticSearchFactory(
        ELASTICSEARCH_HOST,
        ELASTICSEARCH_PORT,
    ),
    'daily_bucket_storageclass_sizes',
    'daily_bucket_storageclass_size',
    daily_bucket_storageclass_size_mapping)
# Pairs of CloudWatch StorageType metric dimension and the corresponding S3
# storage-class name. (The list literal continues beyond this chunk.)
storageTypes = [{
    'StorageType': 'StandardStorage',
    'StorageClass': 'STANDARD'
}, {
    'StorageType': 'IntelligentTieringStorage',
    'StorageClass': 'INTELLIGENT_TIERING'
}, {
    'StorageType': 'StandardIAStorage',
    'StorageClass': 'STANDARD_IA'
}, {
    'StorageType': 'OneZoneIAStorage',
    'StorageClass': 'ONEZONE_IA'
    # (fragment begins inside a boto3.resource(...) call opened above this chunk;
    # the credentials dict presumably holds temporary STS credentials — confirm)
    aws_access_key_id=credentials['AccessKeyId'],
    aws_secret_access_key=credentials['SecretAccessKey'],
    aws_session_token=credentials['SessionToken'],
)
# CloudWatch resource built with the same temporary credentials.
cloudwatch=boto3.resource(
    'cloudwatch',
    aws_access_key_id=credentials['AccessKeyId'],
    aws_secret_access_key=credentials['SecretAccessKey'],
    aws_session_token=credentials['SessionToken'],
)
pp = pprint.PrettyPrinter(indent=4)
# Index writer; ELASTICSEARCH_HOST / ELASTICSEARCH_PORT are presumably
# module-level names defined earlier in this file — confirm.
indexer = ElasticSearchIndex(
    ElasticSearchFactory(
        ELASTICSEARCH_HOST,
        ELASTICSEARCH_PORT,
    ),
    'daily_bucket_storageclass_sizes',
    'daily_bucket_storageclass_size',
    daily_bucket_storageclass_size_mapping
)
# Pairs of CloudWatch StorageType metric dimension and the corresponding S3
# storage-class name. (The list literal continues beyond this chunk.)
storageTypes = [
    {
        'StorageType': 'StandardStorage',
        'StorageClass': 'STANDARD'
    },
    {
        'StorageType': 'IntelligentTieringStorage',
        'StorageClass': 'INTELLIGENT_TIERING'
    },
    # (fragment begins inside an STS assume_role(...) call opened above this
    # chunk; 12-hour session)
    ExternalId='NOTHING_TO_SEE_HERE',
    DurationSeconds=43200
)
# Temporary credentials returned by the assumed role, used for the S3 client.
credentials=assumed_role_object['Credentials']
s3=boto3.resource(
    's3',
    aws_access_key_id=credentials['AccessKeyId'],
    aws_secret_access_key=credentials['SecretAccessKey'],
    aws_session_token=credentials['SessionToken'],
)
pp = pprint.PrettyPrinter(indent=4)
# Index writer targeting a hard-coded local Elasticsearch instance.
indexer = ElasticSearchIndex(
    ElasticSearchFactory(
        'localhost',
        9200,
    ),
    'daily_bucket_storageclass_sizes',
    'daily_bucket_storageclass_size',
    daily_bucket_storageclass_size_mapping
)


def object_crawler():
    # Walks every bucket and every object version under the assumed-role s3
    # resource, aggregating sizes per storage class into storage_type_agg.
    # (Function body continues beyond this chunk.)
    object_counter = 0
    for bucket in s3.buckets.all():
        print('Crawling bucket: ' + bucket.name)
        storage_type_agg = {}
        for s3object in bucket.object_versions.all():
            # NOTE(review): `== None` should be `is None` (PEP 8); left
            # unchanged in this documentation-only pass. Delete markers have
            # size None and are skipped here.
            if s3object.size == None:
                # print('Skipping ' + s3object.object_key)
                continue