def make_filter_expressions(list_of_filter_dicts: list):
    """
    Attr: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/customizations/dynamodb.html#boto3.dynamodb.conditions.Attr

    Create a combined filter expression to be used in a DynamoDB query.
    Supported operators:
    ['begins_with', 'between', 'eq', 'gt', 'gte', 'lt', 'lte', 'attribute_type',
     'contains', 'exists', 'is_in', 'ne', 'not_exists', 'size']

    :param list_of_filter_dicts: List of filter expressions (see Attr link),
        e.g. {'key': <key>, 'value': <value>, 'operator': <operator>}.
        If the operator requires a range, pass the values as a tuple
        (<HIGH_VALUE>, <LOW_VALUE>).
    :return: Combined boto3 filter expression (condition object)
    """
    filter_expressions: Any = None
    first = True
    for filter_dict in list_of_filter_dicts:
        # Concatenate filters with a logical AND
        if first:
            filter_expressions = _add_condition(
                conditions.Attr(filter_dict['key']),
                filter_dict['operator'],
                filter_dict['value'])
            first = False
        else:
            filter_expressions = filter_expressions & _add_condition(
                conditions.Attr(filter_dict['key']),
                filter_dict['operator'],
                filter_dict['value'])
    return filter_expressions

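# A minimal usage sketch for make_filter_expressions. Assumptions: the
# _add_condition helper (not shown above) maps the operator string onto the
# matching Attr method (e.g. 'eq' -> Attr(key).eq(value)), and the table and
# attribute names below are hypothetical, for illustration only.
def example_make_filter_expressions():
    import boto3

    filter_expr = make_filter_expressions([
        {'key': 'status', 'operator': 'eq', 'value': 'ACTIVE'},
        {'key': 'retry_count', 'operator': 'lt', 'value': 3},
    ])
    table = boto3.resource('dynamodb').Table('ExampleTable')  # hypothetical table
    return table.scan(FilterExpression=filter_expr)['Items']
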
def get_logs(cedl_project):
    items = []
    table = boto3.resource('dynamodb').Table('ProjectLog')
    prj_folder = "/{}/".format(cedl_project)
    resp = table.scan(FilterExpression=(
        c.Attr('source_file').contains(prj_folder)
        & c.Attr('execution_starttime').begins_with(SCAN_DT)))
    items.extend(resp['Items'])
    # Keep scanning while the results are paginated
    while 'LastEvaluatedKey' in resp:
        resp = table.scan(FilterExpression=(
            c.Attr('source_file').contains(prj_folder)
            & c.Attr('execution_starttime').begins_with(SCAN_DT)),
            ExclusiveStartKey=resp['LastEvaluatedKey'])
        items.extend(resp['Items'])
    return items

def __getattr__(self, attr):
    expr = conditions.Attr(self.attribute_name)
    if not hasattr(expr, attr):
        raise KeyError(f'Condition {attr} not found.')
    return getattr(expr, attr)

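# The __getattr__ above assumes it lives on a small wrapper class that stores
# an attribute_name and forwards condition builders to boto3's Attr. A minimal
# sketch of such a wrapper (the class name and the example below are
# assumptions, not the original implementation):
from boto3.dynamodb import conditions


class AttrConditionProxy:
    def __init__(self, attribute_name):
        self.attribute_name = attribute_name

    def __getattr__(self, attr):
        expr = conditions.Attr(self.attribute_name)
        if not hasattr(expr, attr):
            raise KeyError(f'Condition {attr} not found.')
        return getattr(expr, attr)


# Builds the same condition object as conditions.Attr('status').eq('DONE')
status_is_done = AttrConditionProxy('status').eq('DONE')
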
def translate_filters(self, filter_list=[], **kwargs):
    try:
        join_type = kwargs['join_type'] if 'join_type' in kwargs else 'and'
        filters = False
        for f in filter_list:
            for k in f.keys():
                if type(f[k]) is list:
                    # If the first item is a str, treat the whole
                    # thing as a list of values for an is_in() test
                    if type(f[k][0]) is str:
                        cond = conditions.Attr(k).is_in(
                            self.check_type(f[k]))
                    else:
                        cond = self.translate_filters(f[k], join_type=k)
                else:
                    if type(f[k]) is not dict:
                        # Treat it as a simple "equals" comparison for the value
                        f[k] = {'value': f[k]}
                    if 'comparison_operator' not in f[k]:
                        f[k]['comparison_operator'] = 'eq'
                    if 'value' not in f[k]:
                        cond = eval("conditions.Attr(k)." +
                                    f[k]['comparison_operator'] + "()")
                    else:
                        # Simple type translation
                        f[k]['value'] = self.check_type(f[k]['value'])
                        cond = eval("conditions.Attr(k)." +
                                    f[k]['comparison_operator'] +
                                    "(f[k]['value'])")
                if not filters:
                    filters = cond
                else:
                    if join_type.lower() == 'or':
                        filters = filters | cond
                    else:
                        # Default to "AND"
                        filters = filters & cond
        return filters
    except botocore.exceptions.ClientError as e:
        self.module.fail_json_aws(
            e, msg="Error constructing filter condition objects")
    except SyntaxError as e:
        # Check to make sure the comparator exists
        if f[k]['comparison_operator'] not in dir(conditions.Attr):
            self.module.fail_json_aws(
                e, 'Comparison "%s" not a valid comparison_operator'
                % (f[k]['comparison_operator']))
        else:
            self.module.fail_json_aws(
                e, 'Error constructing filter condition objects')

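# A hedged usage sketch for translate_filters. The hosting class (with its
# check_type helper and self.module error reporting) is not shown above, so
# the executor instance and the filter values below are illustrative
# assumptions only.
def example_translate_filters(executor):
    # Builds roughly:
    # Attr('owner').eq('alice') AND Attr('age').gt(21) AND Attr('archived').not_exists()
    return executor.translate_filters([
        {'owner': 'alice'},                                   # implicit eq comparison
        {'age': {'comparison_operator': 'gt', 'value': 21}},  # explicit operator + value
        {'archived': {'comparison_operator': 'not_exists'}},  # no value -> zero-arg condition
    ])
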
def get_databases(pointer_table, configurations):
    log.info("Fetching databases", pointer_table=pointer_table)
    table = dynamodb.Table(pointer_table)
    response = table.scan(FilterExpression=conditions.Attr("log_group").is_in(
        [kk for kk in configurations.keys()]))
    group_datas = response["Items"]
    log.info("Fetched pointers", pointers=group_datas)
    for group_data in group_datas:
        db_key = group_data["db_key"]
        log_group = group_data["log_group"]
        config = configurations[log_group]
        local_destination = tempfile.mkstemp()[1]
        s3.Object(config["bucket_name"], db_key).download_file(local_destination)
        with tarfile.open(local_destination) as tar:
            destination = "/tmp/" + log_group
            tar.extractall(path=destination)
        log.info("Fetched existing database",
                 log_group=log_group, configuration=config)
        config["local_db"] = destination
        config["last_updated"] = group_data["update_time"]
    for log_group, config in configurations.items():
        if "local_db" not in config:
            log.info("Creating new database",
                     log_group=log_group, configuration=config)
            path = "/tmp/" + log_group
            if not os.path.exists(path):
                os.makedirs(path)
            goaccess(
                tempfile.mkstemp()[1],
                tempfile.mkstemp()[1],
                path,
                configurations[log_group]["log_format"],
                configurations[log_group]["time_format"],
                configurations[log_group]["date_format"],
            )
            config["local_db"] = path
            # Default to 90 days back
            start = datetime.datetime.utcnow() + datetime.timedelta(days=-90)
            config["last_updated"] = start.strftime("%Y-%m-%d %H:%M:%S")

def ddb_complete(upload_etag_full, table, Src_bucket, Src_key, versionId):
    if upload_etag_full not in ["TIMEOUT", "ERR", "QUIT"]:
        status = "DONE"
    else:
        status = upload_etag_full
    cur_time = time.time()
    table_key = str(PurePosixPath(Src_bucket) / Src_key)
    if Src_key[-1] == '/':  # Handle empty-directory objects
        table_key += '/'
    UpdateExpression = "ADD jobStatus :done SET totalSpentTime=:s-firstTime, endTime=:s, endTime_f=:e"
    ExpressionAttributeValues = {
        ":done": {status},
        ":s": int(cur_time),
        ":e": time.asctime(time.localtime(cur_time))
    }
    # Normal write to DDB; if the job ended abnormally, skip lastTimeProgress=100
    if status == "DONE":
        UpdateExpression += ", lastTimeProgress=:p"
        ExpressionAttributeValues[":p"] = 100
    # update DDB
    logger.info(
        f'Write job complete status to DDB: {status} - {Src_bucket}/{Src_key}')
    try:
        table.update_item(
            Key={"Key": table_key},
            UpdateExpression=UpdateExpression,
            ExpressionAttributeValues=ExpressionAttributeValues,
            ConditionExpression=conditions.Attr('versionId').eq(versionId))
    except ClientError as e:
        if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
            logger.error(f'versionId_not_match - {Src_bucket}/{Src_key}')
            return 'versionId_not_match'
        logger.error(
            f'ClientError Fail to put log to DDB at end - {Src_bucket}/{Src_key} - {str(e)}'
        )
    except Exception as e:
        logger.error(
            f'Fail to put log to DDB at end - {Src_bucket}/{Src_key} - {str(e)}'
        )
    return

def not_exists(attribute_name):
    return boto_conditions.Attr(attribute_name).not_exists()


def is_equal(attribute_name, tester_data):
    return boto_conditions.Attr(attribute_name).eq(tester_data)


def number_lower_than(attribute_name, tester_data):
    return boto_conditions.Attr(attribute_name).lt(tester_data)

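# A minimal sketch combining the helper wrappers above into a single scan
# filter. boto_conditions is assumed to be boto3.dynamodb.conditions; the
# table and attribute names are hypothetical, for illustration only.
def example_helper_filters():
    import boto3

    filter_expr = (
        is_equal('status', 'PENDING')
        & number_lower_than('retry_count', 3)
        & not_exists('locked_by')
    )
    table = boto3.resource('dynamodb').Table('ExampleJobs')  # hypothetical table
    return table.scan(FilterExpression=filter_expr)['Items']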