def get_settings():
    """Load all key/value settings from the DynamoDB settings table.

    Scans the table named by the SETTINGSTABLE environment variable and
    returns a dict mapping each item's "key" attribute to its "value".

    Returns:
        dict -- settings keyed by their "key" attribute.

    Raises:
        Propagates any boto3/DynamoDB error from the scan unchanged.
        (The previous ``except Exception as error: raise error`` was a
        no-op re-raise and has been removed.)
    """
    settings = {}
    # NOTE(review): scan() returns at most 1 MB per call; a paginator would
    # be needed if the table grows beyond one page -- confirm table size.
    items = boto3.client("dynamodb").scan(
        TableName=os.environ.get("SETTINGSTABLE")
    )["Items"]
    for item in items:
        # Second argument True -> dynamodb_json returns a plain dict.
        item_json = dynamodb_json.loads(item, True)
        settings[item_json.get("key")] = item_json.get("value")
    return settings
def anime_by_broadcast_generator(day_of_week, limit=100):
    """Yield anime items whose broadcast day matches *day_of_week*.

    Pages through the "broadcast_day" GSI in descending sort-key order
    (ScanIndexForward=False), decoding each DynamoDB-typed item to a
    plain dict before yielding it.
    """
    pages = _get_client().get_paginator('query').paginate(
        TableName=DATABASE_NAME,
        IndexName="broadcast_day",
        KeyConditionExpression="broadcast_day=:day_of_week",
        ExpressionAttributeValues={":day_of_week": {"S": str(day_of_week)}},
        Limit=limit,
        ScanIndexForward=False,
    )
    for page in pages:
        yield from (json_util.loads(item) for item in page["Items"])
def get_whitelist(self):
    """Build a nested whitelist dict from the DynamoDB whitelist table.

    Shape: {service: {resource_type: [resource, ...]}}, derived by parsing
    each record's "resource_id" with LambdaHelper.parse_resource_id.

    Returns:
        dict -- whitelist (whatever was collected before any failure;
        empty if the table could not be read at all).
    """
    whitelist = {}
    try:
        records = boto3.client('dynamodb').scan(
            TableName=os.environ['WHITELISTTABLE'])['Items']
        for record in records:
            record_json = dynamodb_json.loads(record, True)
            parsed_resource_id = LambdaHelper.parse_resource_id(
                record_json.get('resource_id'))
            whitelist.setdefault(
                parsed_resource_id.get('service'), {}).setdefault(
                parsed_resource_id.get('resource_type'), []).append(
                parsed_resource_id.get('resource'))
    # BUG FIX: narrowed from a bare `except:` so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    except Exception:
        self.logging.error("Could not read DynamoDB table '%s'." %
                           os.environ['WHITELISTTABLE'])
    return whitelist
def delete(self, key: str, **kwargs) -> Any:
    """Delete the cache item stored under *key*.

    Returns:
        The previous item's 'value' if one existed, else None.
    """
    ddb_key = json_util.dumps({'pk': key, 'sk': 'CacheItem'}, as_dict=True)
    response = self._ddb_client.delete_item(
        TableName=self._ddb_table,
        Key=ddb_key,
        ReturnValues='ALL_OLD')
    old_item = response.get('Attributes')
    if old_item is None:
        return None
    return json_util.loads(old_item, as_dict=True)['value']
def get_user_items(username, index_name=None, status_filter=None):
    """Fetch all non-deleted user items for *username* from the reviews table.

    Optionally queries a GSI (*index_name*) and filters by *status_filter*.
    For the progress indexes ("ep_progress"/"special_progress") the key
    condition is additionally narrowed to items below 100 progress.

    Returns:
        list -- items decoded from DynamoDB JSON to plain dicts.
    """
    paginator = _get_client().get_paginator('query')
    query_kwargs = {
        "TableName": REVIEWS_DATABASE_NAME,
        "KeyConditionExpression": "username = :username AND "
        "begins_with(api_info, :api_info) ",
        "ExpressionAttributeValues": {
            ":username": {"S": username},
            ":api_info": {"S": "i_"},
        },
        "ScanIndexForward": False,
        "FilterExpression": "attribute_not_exists(deleted_at)",
    }
    if index_name is not None:
        query_kwargs["IndexName"] = index_name
    if status_filter is not None:
        query_kwargs["FilterExpression"] += " and #status = :status"
        query_kwargs.setdefault("ExpressionAttributeNames",
                                {})["#status"] = "status"
        query_kwargs["ExpressionAttributeValues"][":status"] = {
            "S": status_filter
        }
    if index_name in ["ep_progress", "special_progress"]:
        query_kwargs["KeyConditionExpression"] += " AND #index_name < :progress"
        # BUG FIX: a plain assignment here used to overwrite the whole
        # ExpressionAttributeNames dict, dropping the "#status" mapping
        # when both status_filter and a progress index were requested.
        query_kwargs.setdefault("ExpressionAttributeNames",
                                {})["#index_name"] = index_name
        query_kwargs["ExpressionAttributeValues"][":progress"] = {"N": "100"}
    log.debug(f"Query kwargs: {query_kwargs}")
    res = []
    for page in paginator.paginate(**query_kwargs):
        for item in page["Items"]:
            res.append(json_util.loads(item))
    return res
def get_asset_with_password(custom_id):
    """Serve a password-protected asset download (Chalice POST handler).

    Reads 'password' from the urlencoded body, looks up the non-expired
    record for *custom_id*, verifies the bcrypt hash, and redirects (302)
    to a signed download URL on success. Returns 401 with the unauthorized
    page on a missing/bad password or unknown/expired id, and 500 when the
    DynamoDB query fails.
    """
    print("In POST")
    password = None
    try:
        body = parse_qs(app.current_request.raw_body.decode())
        print(body.get('password'))
        password = body.get('password')[0]
        print(password)
    except Exception as e:
        # Missing/unreadable password -> unauthorized page.
        print(e)
        with open('chalicelib/unauthorized.html', 'r') as f:
            unauthorized_page = f.read()
        return utils.make_response(401, unauthorized_page,
                                   {'Content-Type': 'text/html'})
    try:
        response = DYNAMODB.query(
            TableName=TABLE_NAME,
            KeyConditionExpression="RANDOM_URI = :id",
            FilterExpression="EXPIRES >= :current_time",
            ExpressionAttributeValues={
                ":id": {"S": custom_id},
                ":current_time": {"N": str(int(time.time()))},
            })
        print(response)
    except Exception as e:
        print(e)
        return utils.make_response(500, {
            "message": "Something went wrong on our end. "
                       "Please try again in some time."
        })
    # BUG FIX: an unknown or expired id used to raise an uncaught
    # IndexError on response["Items"][0]; treat it like a bad password.
    if response["Items"] and bcrypt.checkpw(
            password.encode('utf-8'),
            json_util.loads(
                response["Items"][0])["PASSWORD_HASH"].encode('utf-8')):
        print("Key", response["Items"][0]["KEY"]["S"])
        url = utils.download_url(response["Items"][0]["KEY"]["S"])
        return utils.make_response(302, {}, {"Location": url})
    with open('chalicelib/unauthorized.html', 'r') as f:
        unauthorized_page = f.read()
    return utils.make_response(401, unauthorized_page,
                               {'Content-Type': 'text/html'})
def get_count_by_count_date_range(self, supplier_id, min_count_date,
                                  max_count_date=None):
    """Return counts for a supplier within a count_date range.

    Dates arrive as ISO strings; only the date portion is used. When
    *max_count_date* is omitted, all counts strictly after
    *min_count_date* are returned. Each result's 'count_date' is
    converted back to a YYYY-MM-DD string.
    """
    table = 'brewoptix-counts'
    min_epoch = maya.parse(min_count_date.split('T')[0]).epoch
    if max_count_date:
        max_epoch = maya.parse(max_count_date.split('T')[0]).epoch
        date_condition = Key('count_date').between(min_epoch, max_epoch)
    else:
        date_condition = Key('count_date').gt(min_epoch)
    query = {
        'KeyConditionExpression':
            Key('supplier_id').eq(supplier_id) & date_condition,
        'FilterExpression': Attr('latest').eq(True) & Attr('active').eq(True),
        'IndexName': 'by_supplier_id_and_count_date'
    }
    response = self._storage.get_items(table, query)
    counts_obj = []
    for item in response['Items']:
        count = json_util.loads(clean(item))
        count['count_date'] = maya.to_iso8601(
            datetime.utcfromtimestamp(count['count_date'])).split('T')[0]
        counts_obj.append(count)
    return counts_obj
def get_count_by_count_date_range(self, supplier_id, min_count_date,
                                  max_count_date=None):
    """Return counts for a supplier filtered by a count_date range.

    Variant that queries the by_supplier_id_and_obj_type index and applies
    the date range as part of the FilterExpression rather than the key
    condition. 'count_date' values are returned as YYYY-MM-DD strings.
    """
    obj_type = 'counts'
    min_epoch = maya.parse(min_count_date.split('T')[0]).epoch
    print(min_epoch)
    if max_count_date:
        max_epoch = maya.parse(max_count_date.split('T')[0]).epoch
        print(max_epoch)
        date_filter = Attr('count_date').between(min_epoch, max_epoch)
    else:
        date_filter = Attr('count_date').gt(min_epoch)
    query = {
        'KeyConditionExpression':
            Key('supplier_id').eq(supplier_id) & Key('obj_type').eq(obj_type),
        'FilterExpression':
            Attr('latest').eq(True) & Attr('active').eq(True) & date_filter,
        'IndexName': 'by_supplier_id_and_obj_type'
    }
    response = self._storage.get_items(query)
    counts_obj = []
    for item in response['Items']:
        count = json_util.loads(clean(item))
        count['count_date'] = maya.to_iso8601(
            datetime.utcfromtimestamp(count['count_date'])).split('T')[0]
        counts_obj.append(count)
    return counts_obj
def check_user_buying_power(user, buying_power_used):
    """Ensure *user* has at least *buying_power_used* available.

    Creates the user record on first sight (create_user returns the fresh
    buying power). Raises InputError when the available buying power is
    insufficient; otherwise returns it.
    """
    raw_item = db_client.get_item(
        TableName=table_name,
        Key=djsonify({"pk": user, "sk": "user"}),
    )
    user_data = djson.loads(raw_item)
    item = user_data.get("Item")
    buying_power = create_user(user) if item is None else item["buying_power"]
    if buying_power < buying_power_used:
        raise InputError("Not enough buying power")
    return buying_power
def get_neg_skills():
    """Score the skills the user lacks, weighted by job ranking, as JSON."""
    # NOTE(review): json.loads() receives response['Items'] directly; this
    # only works if analysis_table.scan() returns a JSON *string* under
    # 'Items' (a raw boto3 Table.scan returns a list of dicts) -- confirm
    # what kind of wrapper analysis_table is.
    analysis_df = pd.DataFrame(json.loads(
        analysis_table.scan()['Items'])).fillna(False)
    analysis_df = sort_jobs(analysis_df)
    trimmed_df = analysis_df.copy()
    if 'JobId' in trimmed_df.columns:
        trimmed_df = trimmed_df.drop('JobId', axis=1)
    # Drop every skill column the user already has (rows of the
    # module-level `skills` frame with have == True).
    for index, row in skills.iterrows():
        if row['have']:
            # print("Dropping " + row['skill_name'])
            if row['skill_name'] in trimmed_df:
                trimmed_df = trimmed_df.drop(row['skill_name'], axis=1)
    # Weight by job tier: top 10 jobs x3, next 10 x2, next 10 x1.
    skill_scores = 3 * trimmed_df.iloc[0:10].sum(axis=0)
    skill_scores += 2 * trimmed_df.iloc[10:20].sum(axis=0)
    skill_scores += 1 * trimmed_df.iloc[20:30].sum(axis=0)
    return skill_scores.to_json()
def handler(event, context):
    """Sync DynamoDB stream records into Elasticsearch.

    REMOVE events delete the document by its 'asin' key; INSERT/MODIFY
    events upsert it. Returns a summary string with the record count.
    """
    count = 0
    print(event)
    for record in event["Records"]:
        # Get the primary key for use as the Elasticsearch ID
        id = record["dynamodb"]["Keys"]["asin"]["S"]
        if record['eventName'] == 'REMOVE':
            r = requests.delete(url + id, auth=awsauth)
        else:
            # NOTE(review): a stream record's NewImage is a dict in DynamoDB
            # JSON form, but json.loads expects a string -- this likely
            # raises TypeError unless `json` here is a DynamoDB-aware shim.
            # Verify; `document = record['dynamodb']['NewImage']` (plus
            # type deserialization) may have been intended.
            document = json.loads(record['dynamodb']['NewImage'])
            print(document)
            r = requests.put(url + id, auth=awsauth, json=document,
                             headers=headers)
        count += 1
    return str(count) + " records processed."
def lambda_handler(event, context):
    """Return the active sub-categories of a category, sorted by name.

    The display name comes from NAME_ENG or NAME_ESP depending on the
    language path parameter; sorting follows the Spanish alphabet order
    defined below. Responds 500 with an error message on any failure.
    """
    records = []
    specific_order = [
        'A', 'Á', 'B', 'C', 'D', 'E', 'É', 'F', 'G', 'H', 'I', 'Í', 'J',
        'K', 'L', 'M', 'N', 'Ñ', 'O', 'Ó', 'P', 'Q', 'R', 'S', 'T', 'U',
        'Ú', 'Ü', 'V', 'W', 'X', 'Y', 'Z'
    ]
    try:
        language = event['pathParameters']['language']
        categoryId = event['pathParameters']['categoryId']
        response = dynamodb.query(
            TableName="TuCita247",
            ReturnConsumedCapacity='TOTAL',
            KeyConditionExpression='PKID = :categories',
            ExpressionAttributeNames={'#s': 'STATUS'},
            FilterExpression='#s = :stat',
            ExpressionAttributeValues={
                ':categories': {'S': 'CAT#' + categoryId},
                ':stat': {'N': '1'}
            },
        )
        use_english = language.upper() == 'EN'
        for row in json_dynamodb.loads(response['Items']):
            records.append({
                'SubCategoryId': row['SKID'].replace('SUB#', ''),
                'Name': row['NAME_ENG'] if use_english else row['NAME_ESP'],
                'Icon': row['ICON'],
                'Imagen': row['IMG_CAT']
            })
        # Sort by the first character's position in the Spanish alphabet.
        records.sort(key=lambda v: specific_order.index(v['Name'][0:1]))
        statusCode = 200
        body = json.dumps(records)
    except Exception as e:
        statusCode = 500
        body = json.dumps({'Message': 'Error on request try again ' + str(e)})
    return {
        'statusCode': statusCode,
        'headers': {
            "content-type": "application/json",
            "access-control-allow-origin": "*"
        },
        'body': body
    }
def lambda_handler(event, context):
    """Return the access levels of a role within a business.

    Selects the CORS origin from the request's Origin header (localhost
    gets devCors, anything else prodCors), queries all ACCESS# rows for
    the role, and returns them as {AppId, Access} pairs.
    """
    headers_in = event['headers']
    # BUG FIX: use .get() so a request without an 'origin' header does not
    # raise KeyError before a response can be built.
    if headers_in.get('origin') != "http://localhost:4200":
        cors = os.environ['prodCors']
    else:
        cors = os.environ['devCors']
    try:
        businessId = event['pathParameters']['businessId']
        roleId = event['pathParameters']['roleId']
        response = dynamodb.query(
            TableName="TuCita247",
            ReturnConsumedCapacity='TOTAL',
            KeyConditionExpression='PKID = :businessId AND '
                                   'begins_with(SKID , :access)',
            ExpressionAttributeValues={
                ':businessId': {'S': 'BUS#' + businessId},
                ':access': {'S': 'ACCESS#' + roleId + '#'}
            })
        access = []
        for row in json_dynamodb.loads(response['Items']):
            access.append({
                'AppId': row['SKID'].replace('ACCESS#' + roleId + '#', ''),
                'Access': row['LEVEL_ACCESS']
            })
        statusCode = 200
        body = json.dumps({'Code': 200, 'Access': access})
    except Exception as e:
        statusCode = 500
        body = json.dumps({'Message': 'Error on request try again'})
    return {
        'statusCode': statusCode,
        'headers': {
            "content-type": "application/json",
            "access-control-allow-origin": cors
        },
        'body': body
    }
def get_all_package_types(self, supplier):
    """Return package types for one supplier id or a list of supplier ids.

    Only latest+active records are included. Results are sorted by
    'ordinal'; records without an ordinal are appended at the end for
    backward compatibility.
    """
    table = 'brewoptix-package-types'

    def _items_for(supplier_id):
        # One GSI query per supplier id; shared filter for live records.
        return self._storage.get_items(table, {
            'KeyConditionExpression': Key('supplier_id').eq(supplier_id),
            'FilterExpression': (Attr('latest').eq(True) &
                                 Attr('active').eq(True)),
            'IndexName': 'by_supplier_id'
        })['Items']

    if isinstance(supplier, list):
        response_items = []
        for supplier_id in supplier:
            response_items.extend(_items_for(supplier_id))
    else:
        response_items = _items_for(supplier)

    package_types_obj = [json_util.loads(clean(item))
                         for item in response_items]

    # sort by ordinal, keeping ordinal-less (legacy) records at the end
    # TODO: remove this backward comaptible logic in future
    with_ordinal = [pt for pt in package_types_obj if "ordinal" in pt]
    without_ordinal = [pt for pt in package_types_obj if "ordinal" not in pt]
    return (sorted(with_ordinal, key=lambda pt: pt["ordinal"]) +
            without_ordinal)
def get_settings(self):
    """Return the settings table contents as a plain {key: value} dict.

    On any read failure the error is logged and an empty dict is returned.
    """
    settings = {}
    try:
        items = boto3.client("dynamodb").scan(
            TableName=os.environ.get("SETTINGSTABLE"))["Items"]
    # BUG FIX: narrowed from a bare `except:` so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    except Exception:
        self.logging.error(
            f"""Could not read DynamoDB table '{os.environ.get("SETTINGSTABLE")}'."""
        )
        self.logging.error(sys.exc_info()[1])
    else:
        for item in items:
            item_json = dynamodb_json.loads(item, True)
            settings[item_json.get("key")] = item_json.get("value")
    return settings
def fetch_and_populate_cols(self,
                            conditional_items=None,
                            key_condition_expression=None,
                            filter_expression=None,
                            attributes_to_fetch=None,
                            sort_key=True):
    """Fetch one row and populate this object's columns from it.

    Resets all columns, fetches a row with the given query parts, and
    populates the columns when a row was found.

    Returns:
        The fetched row decoded from DynamoDB JSON via db_json.loads.

    BUG FIX: ``attributes_to_fetch`` previously defaulted to a mutable
    list (``[]``) shared across calls; ``None`` is the sentinel now and is
    converted to a fresh empty list per call.
    """
    if attributes_to_fetch is None:
        attributes_to_fetch = []
    self.set_cols_none()
    result = self.fetch_row(
        conditional_items=conditional_items,
        key_condition_expression=key_condition_expression,
        filter_expression=filter_expression,
        attributes_to_fetch=attributes_to_fetch,
        sort_key=sort_key)
    if result:
        self.populate_cols(**result)
    # NOTE(review): loads() is also applied when result is falsy --
    # confirm db_json.loads(None) is acceptable to callers.
    return db_json.loads(result)
def top_bot(table, category):
    """Plot the top-10 tweeting bot accounts for *category*.

    *table* is a JSON string of tweet records with 'topic', 'user_type'
    and 'username' fields.
    """
    # read the data file
    data_df = pd.DataFrame(json.loads(table))
    # choose the category
    data_df = data_df[data_df["topic"] == category]
    # choose bots (the original comment said "humans", but the filter
    # keeps rows where user_type == "Bot")
    data_df = data_df[data_df["user_type"] == "Bot"]
    plt.figure(figsize=(5, 5))
    # NOTE(review): `users` (top-10 counts, reversed for horizontal-bar
    # order) is computed but never plotted -- a users.plot(kind='barh')
    # call appears to be missing, so plt.show() renders an empty labeled
    # figure. Confirm intent before fixing.
    users = data_df.username.value_counts()[10::-1]
    plt.title('Top 10 Tweeting Bots')
    plt.xlabel('Frequency')
    plt.ylabel('User name')
    plt.show()
def lambda_handler(event, context):
    """Mirror DynamoDB stream events into the plants table.

    INSERT/MODIFY images trigger plant_update; REMOVE images trigger
    delete_old_item. A record whose device has dendrometerCh == -1
    short-circuits the whole handler with 0 (note: this also skips any
    remaining records in the batch, matching the original behavior).
    """
    dynamodb = boto3.resource('dynamodb')
    # iterate event object to scan all update from db
    for record in event['Records']:
        event_name = record['eventName']
        data = json_util.loads(record['dynamodb'])
        if event_name in ('MODIFY', 'INSERT'):
            channel = data['NewImage']['device']['dendrometerCh']
            if int(channel) == -1:
                print(channel)
                return 0
            plant_update(dynamodb, data)
        elif event_name == 'REMOVE':
            channel = data['OldImage']['device']['dendrometerCh']
            if int(channel) == -1:
                print(channel)
                return 0
            delete_old_item(dynamodb, data)
def get(self, entity_id):
    """Return the latest active item for *entity_id*, or None."""

    @paginate(first_match=True)
    def run_query(entity_id, **kwargs):
        # Stops paging as soon as a page contains a match (first_match).
        return self._client.Table(self._table).query(
            KeyConditionExpression=Key('entity_id').eq(entity_id),
            FilterExpression=Attr('latest').eq(True) & Attr('active').eq(True),
            **kwargs)

    response = run_query(entity_id)
    if not response["Count"]:
        return None
    return json_util.loads(response['Items'][0])
def findTimeZone(businessId, locationId):
    """Return the location's TIME_ZONE, defaulting to America/Puerto_Rico."""
    timeZone = 'America/Puerto_Rico'
    locZone = dynamodbQuery.query(
        TableName="TuCita247",
        ReturnConsumedCapacity='TOTAL',
        KeyConditionExpression='PKID = :key AND SKID = :skey',
        ExpressionAttributeValues={
            ':key': {'S': 'BUS#' + businessId},
            ':skey': {'S': 'LOC#' + locationId}
        })
    for timeLoc in json_dynamodb.loads(locZone['Items']):
        if 'TIME_ZONE' in timeLoc:
            timeZone = timeLoc['TIME_ZONE']
        else:
            timeZone = 'America/Puerto_Rico'
    return timeZone
def get_by_user_id(self, table, user_id):
    """Look up the latest active record for *user_id* via the by_user_id GSI.

    Retries up to four times with a linearly growing sleep (0..3 s) to
    tolerate GSI eventual consistency; returns None when nothing is found.
    """
    for attempt in range(4):
        response = self._client.Table(table).query(
            Select='ALL_ATTRIBUTES',
            IndexName='by_user_id',
            KeyConditionExpression=Key('user_id').eq(user_id),
            FilterExpression=Attr('latest').eq(True) & Attr('active').eq(True))
        if response["Count"] > 0:
            return json_util.loads(response['Items'][0])
        # First retry sleeps 0 s, then 1, 2, 3 s between attempts.
        time.sleep(attempt)
    return None
def get_settings(self):
    """Return the full settings table as a {key: value} dict.

    Uses a scan paginator so tables larger than one scan page are fully
    read. On failure, logs the error and returns an empty dict.
    """
    settings = {}
    try:
        paginator = boto3.client("dynamodb").get_paginator("scan")
        # Default to [] so a result with no "Items" cannot make the
        # else-branch loop crash on None.
        items = (paginator.paginate(TableName=os.environ.get(
            "SETTINGS_TABLE")).build_full_result().get("Items", []))
    # BUG FIX: narrowed from a bare `except:` so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    except Exception:
        self.logging.error(
            f"""Could not read DynamoDB table '{os.environ.get("SETTINGS_TABLE")}'."""
        )
        self.logging.error(sys.exc_info()[1])
    else:
        for item in items:
            item_json = dynamodb_json.loads(item, True)
            settings[item_json.get("key")] = item_json.get("value")
    return settings
def get_from_chache(id):
    """Gets an object by its id from DynamoDB.

    Returns the decoded item with a 'source': 'cache' marker, or None on
    a cache miss. (The "chache" typo in the name is kept for caller
    compatibility.)
    """
    client = boto3.client('dynamodb')
    response = client.get_item(TableName='serverless_crawler_index',
                               Key={'id': {'N': str(id)}})
    try:
        result = dynamo_json.loads(response['Item'])
        result['source'] = 'cache'
    # FIX: the exception object was bound (`as exc`) but never used.
    except KeyError:  # no 'Item' in response -> miss
        result = None
    return result
def get_whitelist(self):
    """Build {service: {resource_type: {resource, ...}}} from the whitelist table.

    Resources are collected into sets, de-duplicating entries. On any read
    failure the error is logged and whatever was collected is returned.
    """
    whitelist = {}
    try:
        for record in boto3.client("dynamodb").scan(
                TableName=os.environ.get("WHITELISTTABLE"))["Items"]:
            record_json = dynamodb_json.loads(record, True)
            parsed_resource_id = Helper.parse_resource_id(
                record_json.get("resource_id"))
            whitelist.setdefault(
                parsed_resource_id.get("service"), {}).setdefault(
                parsed_resource_id.get("resource_type"),
                set()).add(parsed_resource_id.get("resource"))
    # BUG FIX: narrowed from a bare `except:` so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    except Exception:
        self.logging.error(
            f"""Could not read DynamoDB table '{os.environ.get("WHITELISTTABLE")}'."""
        )
        self.logging.error(sys.exc_info()[1])
    return whitelist
def get_top_jobs():
    """Return the top-10 scored jobs as a JSON string.

    Scans the analysis table, sorts jobs by score, fetches each of the top
    10 job records by JobId, and assembles them into one DataFrame.
    """
    # now analysis_df is local
    analysis_df = pd.DataFrame(json.loads(
        analysis_table.scan()['Items'])).fillna(False)
    analysis_df = sort_jobs(analysis_df)
    job_rows = []
    for row in range(0, 10):
        if row >= len(analysis_df):
            break
        JobId = analysis_df.iloc[row]['JobId']
        response = jobs_table.get_item(Key={'JobId': JobId})
        job = response['Item']
        # BUG FIX: DataFrame.append was deprecated and removed in pandas
        # 2.0; collect the per-job frames and concatenate once instead.
        job_rows.append(pd.DataFrame([job], index=[JobId]))
    top_jobs_df = pd.concat(job_rows) if job_rows else pd.DataFrame()
    return top_jobs_df.to_json()
def lambda_handler(event, context):
    """Delete on-hold appointment reservations older than five minutes.

    Queries the TuCita247_CustAppos GSI for RES#APPO rows whose sort key
    is at most now-5min, deletes each one, and returns a JSON status
    response (500 with the error message on failure).
    """
    try:
        cutoff = datetime.datetime.now() - datetime.timedelta(hours=0,
                                                              minutes=5)
        currTime = cutoff.strftime("%Y-%m-%d-%H-%M-%S")
        details = dynamodb.query(
            TableName="TuCita247",
            IndexName="TuCita247_CustAppos",
            ReturnConsumedCapacity='TOTAL',
            KeyConditionExpression='GSI2PK = :pkid AND GSI2SK <= :currTime',
            ExpressionAttributeValues={
                ':pkid': {'S': 'RES#APPO'},
                ':currTime': {'S': currTime}
            })
        table = dynamodbQuery.Table('TuCita247')
        for expired in json_dynamodb.loads(details['Items']):
            table.delete_item(Key={
                'PKID': expired['PKID'],
                'SKID': expired['SKID']
            })
        statusCode = 200
        body = json.dumps({'OnHold': 'Success', 'Code': 200})
    except Exception as e:
        statusCode = 500
        body = json.dumps({'Message': str(e), 'Code': 500})
    return {
        'statusCode': statusCode,
        'headers': {
            "content-type": "application/json",
            "access-control-allow-origin": "*"
        },
        'body': body
    }
def get_supplier_distributor_by_access_code(self, access_code):
    """Return the latest active supplier-distributor for *access_code*.

    Raises:
        NoSuchEntity -- when no matching record exists.
    """
    table = 'brewoptix-supplier-distributors'
    response = self._storage.get_items(table, {
        'KeyConditionExpression': Key('access_code').eq(access_code),
        'FilterExpression': (Attr('latest').eq(True) &
                             Attr('active').eq(True)),
        'IndexName': 'by_access_code'
    })
    items = response['Items']
    if not items:
        raise NoSuchEntity
    return json_util.loads(clean(items[0]))
def get_event(event_id):
    """Fetch a single event by id and return it as a plain dict.

    Returns a (json, 404) tuple when the event does not exist.
    """
    lookup = client.get_item(TableName=EVENTS_TABLE,
                             Key={'eventId': {'S': event_id}})
    item = lookup.get('Item')
    if not item:
        return jsonify({'error': 'Event does not exist'}), 404
    payload = {
        'eventId': item.get('eventId').get('S'),
        'eventoNome': item.get('eventoNome').get('S'),
        'eventoData': item.get('eventoData').get('S'),
        'eventoFilas': item.get('eventoFilas').get('M'),
        'eventoServicos': item.get('eventoServicos').get('M'),
        'eventoVisitantes': item.get('eventoVisitantes').get('M')
    }
    # dynamo_json.loads decodes the remaining DynamoDB-typed 'M' values.
    return dynamo_json.loads(payload)
def get_settings(self):
    """Return the DynamoDB aws-auto-remediate-settings table in a Python dict format

    Returns:
        dict -- aws-auto-remediate-settings table
    """
    settings = {}
    try:
        for record in self.client_dynamodb.scan(
                TableName=os.environ["SETTINGSTABLE"])["Items"]:
            record_json = dynamodb_json.loads(record, True)
            settings[record_json.get("key")] = record_json.get("value")
    # BUG FIX: narrowed from a bare `except:` so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    except Exception:
        self.logging.error(
            f"Could not read DynamoDB table '{os.environ['SETTINGSTABLE']}'."
        )
        self.logging.error(sys.exc_info()[1])
    return settings
def lambda_handler(event, context):
    """Create a new CLUE game record and return its state.

    Generates a game id, persists the Game item to the CLUE_GAMES table,
    and returns a 200 API Gateway response whose body is the game's
    attribute dict. Any failure is logged and re-raised.
    """
    print('executing create-game lambda function')
    db = boto3.resource('dynamodb', region_name='us-east-1')
    table = db.Table('CLUE_GAMES')
    try:
        # Generate Game UUID
        new_game_id = str(uuid4())
        print('Next new_game_id is: ' + new_game_id)
        new_game = Game(new_game_id)
        # Add New Game Item to Table
        create_game_response_dynamo = table.put_item(Item=new_game.__dict__)
        create_game_response = dynamo_json.loads(create_game_response_dynamo)
        # Check Status Code of Put Item is 200
        if create_game_response['ResponseMetadata']['HTTPStatusCode'] == 200:
            print(
                'Put Item was succesful in dynamodb. Incrementing game counter item now.'
            )
        else:
            print(
                'Something messed up while putting new Game item in dynamodb.')
            # BUG FIX: a bare `raise` with no active exception raises
            # RuntimeError('No active exception to re-raise'); raise an
            # explicit, descriptive error instead.
            raise RuntimeError(
                'put_item returned a non-200 status for game ' + new_game_id)
        response = {
            "statusCode": 200,
            "headers": {
                "Content-Type": "application/json"
            },
            "isBase64Encoded": False,
            "body": new_game.__dict__
        }
        return response
    except:
        # Boundary logger: log the failure type, then re-raise unchanged.
        print("ERROR:", sys.exc_info()[0])
        raise
# Export the RawFootfall DynamoDB table to footfall.json, and print every
# table's attribute definitions along the way.
#
# BUG FIXES: the original script was missing the closing parenthesis on the
# pandas.read_json(...) call (SyntaxError), used `json` and `pandas` before
# importing them, and never imported boto3/pprint/json at all. All imports
# are now grouped at the top of the file.
import json
import pprint

import boto3
import pandas
from dynamodb_json import json_util

dynamodb = boto3.client('dynamodb', region_name='us-east-1')


def get_tables_meta():
    """Print each DynamoDB table name and its attribute definitions."""
    tables = dynamodb.list_tables()
    for table in tables['TableNames']:
        print(table)
        table_info = dynamodb.describe_table(TableName=table)['Table']
        pprint.pprint(table_info['AttributeDefinitions'])


print(get_tables_meta())

response = dynamodb.scan(TableName='RawFootfall')

# from dynamodb format to normal sane json
dynamodb_json = json_util.loads(response['Items'])

# load into pandas
df = pandas.read_json(json.dumps(dynamodb_json))

# dump to file
with open('footfall.json', 'w') as outfile:
    json.dump(dynamodb_json, outfile)