def post(self, indexer: ElasticSearchIndex, room) -> dict:
    """Index an empty document and echo the request payload back.

    NOTE(review): this looks like a stub/early version — it indexes an
    empty dict rather than ``room`` itself; confirm against the fuller
    implementation of this endpoint.
    """
    empty_doc = {}
    indexer.index(empty_doc)
    return room
def post(self, indexer: ElasticSearchIndex, room: dict) -> tuple:
    """Create (index) a new room document.

    Rejects duplicates by URL with 409 Conflict; otherwise assigns a
    fresh UUID and indexes the document.

    (The previous docstring described an unrelated location-lookup
    endpoint and was removed; the return annotation is corrected from
    ``dict`` to ``tuple`` — every branch returns ``(body, status)``.)

    :param indexer: injected Elasticsearch index wrapper
    :param room: room payload; must contain a ``'url'`` key
    :return: ``(room, 409)`` on duplicate URL,
             ``({"error": ...}, 400)`` when indexing fails,
             ``(room, 201)`` on success
    """
    if indexer.exists_by_url(room['url']):
        # Duplicate URL -> 409 HTTP Conflict
        return room, 409

    # Generate a unique ID for the new room
    room['id'] = str(uuid.uuid4())

    if not indexer.index(room):
        return {"error": "Room not saved"}, 400

    return room, 201
def configure(binder: Binder) -> Binder:
    """Register the rooms ElasticSearchIndex binding on the injector.

    Connection parameters come from the ELASTICSEARCH_HOST and
    ELASTICSEARCH_PORT environment variables (KeyError if missing).
    """
    factory = ElasticSearchFactory(
        os.environ['ELASTICSEARCH_HOST'],
        os.environ['ELASTICSEARCH_PORT'],
    )
    binder.bind(
        ElasticSearchIndex,
        ElasticSearchIndex(factory, 'rooms', 'room', room_mapping),
    )
    return binder
def configure(binder: Binder) -> Binder:
    """Register the test ItemsProvider and the rooms ElasticSearchIndex.

    BUG FIX: the original passed both interface/implementation pairs as
    four positional arguments to a single ``binder.bind(...)`` call;
    ``Binder.bind`` takes one interface plus its implementation (and an
    optional scope), so that call raised a TypeError. Each binding now
    gets its own ``bind`` call.
    """
    binder.bind(
        ItemsProvider,
        ItemsProvider([{"Name": "Test1"}]))
    binder.bind(
        ElasticSearchIndex,
        ElasticSearchIndex(
            ElasticSearchFactory(os.environ['ELASTICSEARCH_HOST'],
                                 os.environ['ELASTICSEARCH_PORT']),
            'rooms',
            'room',
            room_mapping))
    return binder
def configure(binder: Binder) -> Binder:
    """Wire up Firestore, Washer and the ElasticSearchIndex bindings.

    Loads environment variables from the nearest .env file first, so the
    AUTOSLANCES_* settings are available to ``os.getenv``.
    """
    load_dotenv(find_dotenv())

    firestore = Firestore(
        os.getenv('AUTOSLANCES_CREDENTIALS_PATH'),
        os.getenv('AUTOSLANCES_DATABASE_NAME'),
    )
    binder.bind(Firestore, firestore)
    binder.bind(Washer)

    # One index wrapper serves every entity type via its own mapping.
    mappings = {
        'vehicles': vehicle_mapping,
        'brands': brand_mapping,
        'models': model_mapping,
        'versions': version_mapping,
    }
    binder.bind(ElasticSearchIndex,
                ElasticSearchIndex(ElasticSearchFactory(), mappings))
    return binder
def list(self, indexer: ElasticSearchIndex) -> list:
    """Return every indexed room together with HTTP 200."""
    rooms = indexer.list()
    return rooms, 200
def get(self, indexer: ElasticSearchIndex, _id: str) -> list:
    """Fetch a single room by its ID together with HTTP 200."""
    room = indexer.get(_id)
    return room, 200
def delete(self, indexer: ElasticSearchIndex, _id: str) -> dict:
    """Delete a room by ID.

    :return: ``(None, 200)`` on success, otherwise a 400 error body.
    """
    if indexer.delete(_id):
        return None, 200
    return {"error": "Room not removed"}, 400
import datetime from services.elasticsearch import ElasticSearchIndex, ElasticSearchFactory from conf.elasticsearch_mapper import daily_bucket_storageclass_size_mapping # boto3.set_stream_logger('', 10) sts_client = boto3.client('sts') s3 = boto3.resource('s3') cloudwatch = boto3.resource('cloudwatch') pp = pprint.PrettyPrinter(indent=4) indexer = ElasticSearchIndex(ElasticSearchFactory( 'localhost', 9200, ), 'daily_bucket_storageclass_sizes', 'daily_bucket_storageclass_size', daily_bucket_storageclass_size_mapping) storageTypes = [{ 'StorageType': 'StandardStorage', 'StorageClass': 'STANDARD' }, { 'StorageType': 'IntelligentTieringStorage', 'StorageClass': 'INTELLIGENT_TIERING' }, { 'StorageType': 'StandardIAStorage', 'StorageClass': 'STANDARD_IA' }, { 'StorageType': 'OneZoneIAStorage', 'StorageClass': 'ONEZONE_IA'
's3', aws_access_key_id=credentials['AccessKeyId'], aws_secret_access_key=credentials['SecretAccessKey'], aws_session_token=credentials['SessionToken'], ) cloudwatch = boto3.resource( 'cloudwatch', aws_access_key_id=credentials['AccessKeyId'], aws_secret_access_key=credentials['SecretAccessKey'], aws_session_token=credentials['SessionToken'], ) pp = pprint.PrettyPrinter(indent=4) indexer = ElasticSearchIndex( ElasticSearchFactory( ELASTICSEARCH_HOST, ELASTICSEARCH_PORT, ), 'daily_bucket_storageclass_sizes', 'daily_bucket_storageclass_size', daily_bucket_storageclass_size_mapping) storageTypes = [{ 'StorageType': 'StandardStorage', 'StorageClass': 'STANDARD' }, { 'StorageType': 'IntelligentTieringStorage', 'StorageClass': 'INTELLIGENT_TIERING' }, { 'StorageType': 'StandardIAStorage', 'StorageClass': 'STANDARD_IA' }, { 'StorageType': 'OneZoneIAStorage', 'StorageClass': 'ONEZONE_IA'