def _raise_if_field_name_is_invalid(field_name: str):
    """Validate a user-defined field name.

    :param field_name: The field name to validate.
    :raises InvalidFieldNameException: If the name contains a restricted char
        or matches one of the restricted names.
    """
    # re.findall always returns a list (possibly empty), never None, so a plain
    # truthiness check replaces the previous "is not None and len(...) > 0" test.
    restricted_chars_matches: List[tuple] = re.findall(
        pattern=FIELD_NAME_RESTRICTED_CHARS_EXPRESSION, string=field_name)
    if restricted_chars_matches:
        raise InvalidFieldNameException(message_with_vars(
            message="A field name was using one or multiple restricted chars",
            vars_dict={
                'fieldName': field_name,
                'restrictedCharsMatch': restricted_chars_matches,
                'FIELD_NAME_RESTRICTED_CHARS_LIST': FIELD_NAME_RESTRICTED_CHARS_LIST
            }
        ))

    restricted_names_matches: List[tuple] = re.findall(
        pattern=FIELD_NAME_RESTRICTED_NAMES_EXPRESSION, string=field_name)
    if restricted_names_matches:
        raise InvalidFieldNameException(message_with_vars(
            message="A field name was using a restricted name",
            vars_dict={
                'fieldName': field_name,
                'restricted_names_matches': restricted_names_matches,
                'FIELD_NAME_RESTRICTED_NAMES_LIST': FIELD_NAME_RESTRICTED_NAMES_LIST
            }
        ))
def make_rendered_database_path(
        database_path_elements: List[DatabasePathElement],
        query_kwargs: dict) -> List[DatabasePathElement]:
    """Render a static database path by substituting every '$key$:' placeholder
    element key with its matching value from query_kwargs.

    :param database_path_elements: The static path elements (shared between calls).
    :param query_kwargs: Values used to fill the '$key$:' placeholders.
    :return: A new list of path elements with all placeholders resolved.
    :raises MissingQueryKwarg: If a placeholder has no matching kwarg.
    :raises Exception: If a placeholder exists but query_kwargs is None.
    """
    rendered_elements: List[DatabasePathElement] = []
    for source_element in database_path_elements:
        if "$key$:" not in source_element.element_key:
            # This element needs no modification, so the shared instance can be
            # reused safely: other queries/operations relying on the same static
            # path elements will not be affected.
            rendered_elements.append(source_element)
            continue

        placeholder_name = source_element.element_key.replace('$key$:', '')
        if query_kwargs is None:
            raise Exception(
                message_with_vars(
                    message=
                    "A variable was required but no query_kwargs have been passed to the make_rendered_database_path function.",
                    vars_dict={
                        "keyVariableName": placeholder_name,
                        "queryKwargs": query_kwargs,
                        "databasePathElements": database_path_elements
                    }))

        kwarg_value: Optional[Any] = query_kwargs.get(placeholder_name, None)
        if kwarg_value is None:
            raise MissingQueryKwarg(
                message_with_vars(
                    message=
                    "A variable was required but not found in the query_kwargs dict passed to the make_rendered_database_path function.",
                    vars_dict={
                        "keyVariableName": placeholder_name,
                        "matchingKwarg": kwarg_value,
                        "queryKwargs": query_kwargs,
                        "databasePathElements": database_path_elements
                    }))

        # Never mutate the shared static element: build a fresh DatabasePathElement
        # so later invocations still see a clean '$key$:' placeholder to fill with
        # their own query_kwargs.
        rendered_elements.append(DatabasePathElement(
            element_key=kwarg_value,
            default_type=source_element.default_type,
            custom_default_value=source_element.custom_default_value
        ))
    return rendered_elements
def _get_or_query_single_item(self, key_name: str, key_value: str, fields_to_get: List[str]) -> Optional[dict]: if self.primary_index.hash_key_name == key_name: response_item: Optional[dict] = self.get_item_by_primary_key( key_name=key_name, key_value=key_value, fields_to_get=fields_to_get ).item return response_item else: if key_name not in self._global_secondary_indexes_hash_keys: print(message_with_vars( message="A key_name was not the primary_index key_name, and was not found in the global_secondary_indexes" "hash_keys. Database query not executed, and None is being returned.", vars_dict={"primary_index.hash_key_name": self.primary_index.hash_key_name, "_global_secondary_indexes_hash_keys": self._global_secondary_indexes_hash_keys, "key_name": key_name, "key_value": key_value, "fields_to_get": fields_to_get} )) return None else: response_items: Optional[List[dict]] = self.query_by_key( index_name=key_name, key_name=key_name, key_value=key_value, fields_to_get=fields_to_get, query_limit=1 ).items if isinstance(response_items, list) and len(response_items) > 0: return response_items[0] else: return None
def make_rendered_fields_paths(fields_paths: List[str], query_kwargs: dict) -> List[str]:
    """Render the first '{{variableName}}' placeholder of each field path using query_kwargs.

    The list is modified in place and also returned (same contract as before).

    :param fields_paths: Field path strings, possibly containing '{{var}}' placeholders.
    :param query_kwargs: Values used to fill the placeholders.
    :return: The same list with placeholders rendered.
    :raises Exception: When a placeholder has no matching key in query_kwargs.
    """
    for i, field_key in enumerate(fields_paths):
        start_variable_first_char_index = field_key.find("{{")
        if start_variable_first_char_index == -1:
            continue
        end_variable_first_char_index = field_key.find("}}")
        if end_variable_first_char_index == -1:
            continue

        start_variable_last_char_index = start_variable_first_char_index + 2
        end_variable_last_char_index = end_variable_first_char_index + 2
        variable_key_name = field_key[start_variable_last_char_index:end_variable_first_char_index]

        variable_matching_kwarg = query_kwargs.get(variable_key_name, None)
        if variable_matching_kwarg is None:
            raise Exception(
                message_with_vars(
                    message=
                    "A key was required in a field to get, but no matching query kwarg was found.",
                    vars_dict={
                        "fieldsPaths": fields_paths,
                        "fieldKey": field_key,
                        "variableKeyName": variable_key_name,
                        "queryKwargs": query_kwargs,
                        "variableMatchingKwarg": variable_matching_kwarg
                    }))

        # BUGFIX: the trailing slice was previously field_key[end_variable_last_char_index:0],
        # which always evaluates to an empty string and silently dropped everything after
        # the placeholder. Slicing to the end of the string keeps the path suffix.
        fields_paths[i] = (
            f"{field_key[0:start_variable_first_char_index]}"
            f"{variable_matching_kwarg}"
            f"{field_key[end_variable_last_char_index:]}"
        )
    return fields_paths
def convert(python_type: Any) -> str:
    """Map a Python type to its DynamoDB type descriptor string.

    :param python_type: The Python type to convert.
    :return: The DynamoDB type string for the given Python type.
    :raises Exception: When the Python type has no DynamoDB equivalent in the switch.
    """
    matching_dynamodb_type: Optional[str] = PythonToDynamoDBTypesConvertor.switch.get(python_type, None)
    if matching_dynamodb_type is not None:
        return matching_dynamodb_type
    raise Exception(message_with_vars(
        message="Python to DynamoDB types conversion failed. The specified Python type is not supported",
        vars_dict={'specifiedPythonType': python_type}
    ))
def _model_contain_all_index_keys(model: Any, indexes_keys: Iterable[str]) -> bool: for index_key in indexes_keys: index_matching_field: Optional[Any] = getattr(model, index_key, None) if index_matching_field is None: print(message_with_vars( message="An index key selector was not found in the table model. Operation not executed.", vars_dict={'index_key': index_key, 'index_matching_field': index_matching_field, 'table.model': model} )) return False return True
def __init__(self, name: str, field_type: Optional[Any] = None, required: Optional[bool] = False, not_modifiable: Optional[bool] = False, custom_default_value: Optional[Any] = None, key_name: Optional[str] = None, max_nested_depth: Optional[int] = 32):
    """Construct a named field definition.

    :param name: Field name used for the database attribute.
    :param field_type: Expected Python type(s); defaults to Any when None.
    :param required: Whether the field must be populated.
    :param not_modifiable: Not implemented — raises when True.
    :param custom_default_value: Default value forwarded to the parent item.
    :param key_name: Optional key name; only allowed on dict / Dict-like field types.
    :param max_nested_depth: Maximum nesting depth; DynamoDB caps this at 32.
    """
    super().__init__(field_type=field_type if field_type is not None else Any, custom_default_value=custom_default_value)
    self._name = name
    self._required = required
    self._key_name = None
    if max_nested_depth is not None and max_nested_depth > 32:
        # DynamoDB's own attribute-nesting limit, not a StructNoSQL restriction.
        raise Exception(f"DynamoDB support a maximum depth of nested of items of 32. This is not imposed by StructNoSQL but a platform limitation.\n"
                        f"See : https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html#limits-attributes")
    self._max_nested = max_nested_depth
    if not_modifiable is True:
        raise Exception(f"Not modifiable not yet implemented")
    if key_name is not None:
        # NOTE(review): `insta` is never used afterwards — looks like leftover debugging.
        insta = isinstance(field_type, (tuple, list))
        if field_type is dict or type(field_type) is _GenericAlias:
            self._key_name = key_name
        elif isinstance(field_type, (tuple, list)):
            raise Exception(f"Multiple dictionaries are not yet supported.")
            # NOTE(review): everything below this raise is unreachable dead code —
            # presumably a planned multi-dict implementation; confirm before removing.
            all_items_are_dict = True
            for item in field_type:
                item_type = type(item)
                if not isinstance(item, (dict, _GenericAlias)):
                    all_items_are_dict = False
                    break
            if all_items_are_dict is True:
                self._key_name = key_name
            else:
                raise Exception(message_with_vars(
                    "key_name cannot be set on a field that is a tuple or list that does not exclusivly contains dict or Dict items",
                    vars_dict={"fieldName": name, "fieldType": field_type, "keyName": key_name}
                ))
        else:
            raise Exception(message_with_vars(
                "key_name cannot be set on a field that is not of type dict or Dict",
                vars_dict={"fieldName": name, "fieldType": field_type, "keyName": key_name}
            ))
    else:
        # Default key name for dict-like fields when none was provided.
        if field_type == dict or type(field_type) == _GenericAlias:
            self._key_name = f"{name}Key"
def process_and_get_field_path_object_from_field_path(
        field_path_key: str, fields_switch: dict) -> BaseItem:
    """Look up the field object registered under field_path_key in the fields switch.

    :param field_path_key: The field path to resolve.
    :param fields_switch: Mapping of field path keys to field objects.
    :return: The registered field object.
    :raises FieldTargetNotFoundException: When no field is registered under the key.
    """
    matching_field_object = fields_switch.get(field_path_key, None)
    if matching_field_object is None:
        raise FieldTargetNotFoundException(
            message_with_vars(message=f"A field target to get was not found.",
                              vars_dict={
                                  "fieldPathKey": field_path_key,
                                  "fieldsSwitch": fields_switch
                              }))
    return matching_field_object
def dict_item(self):
    """Build a MapItem accessor for the items of this dict-typed field.

    :return: BaseField
    :raises Exception: When the field is not of type dict or its item type is unknown.
    """
    is_typed_dict_field = self._field_type == dict and self.dict_items_excepted_type is not None
    if not is_typed_dict_field:
        raise Exception(message_with_vars(
            message="Tried to access dict item of a field that was not of type dict, "
                    "Dict or did not properly received the expected type of the dict items.",
            vars_dict={"fieldName": self.field_name, "fieldType": self.field_type,
                       "dictItemsExceptedType": self.dict_items_excepted_type}
        ))
    return MapItem(
        parent_field=self,
        field_type=self.dict_items_excepted_type,
        model_type=self.dict_items_excepted_type
    )
def _get_field_object_from_field_path(
        field_path_key: str, fields_switch: Dict[str, BaseField]) -> BaseField:
    """Resolve a field path key to its BaseField via the fields switch.

    :param field_path_key: The field path to resolve.
    :param fields_switch: Mapping of field path keys to BaseField objects.
    :return: The matching BaseField.
    :raises FieldTargetNotFoundException: When the key is not registered.
    """
    matching_field: Optional[BaseField] = fields_switch.get(field_path_key, None)
    if matching_field is None:
        raise FieldTargetNotFoundException(
            message_with_vars(message=f"A field target to get was not found.",
                              vars_dict={
                                  "field_path_key": field_path_key,
                                  "fields_switch": fields_switch
                              }))
    return matching_field
def delete_record(self, indexes_keys_selectors: dict) -> bool:
    """Delete a single record targeted by the given index selectors.

    Every selector key must resolve to a non-None attribute on the table model;
    otherwise all missing keys are reported and False is returned without
    touching the database.

    :param indexes_keys_selectors: Mapping of index key name to target value.
    :return: The client delete result, or False when any selector key is invalid.
    """
    all_indexes_found = True
    for selector_key, selector_value in indexes_keys_selectors.items():
        # Report every missing key instead of stopping at the first one.
        if getattr(self.model, selector_key, None) is None:
            all_indexes_found = False
            print(message_with_vars(
                message="An index key selector passed to the delete_record function, was not found, in the table model. Operation not executed.",
                vars_dict={"index_key": selector_key, "index_target_value": selector_value,
                           "index_matching_field": None, "table.model": self.model}
            ))
    if not all_indexes_found:
        return False
    return self.dynamodb_client.delete_record(indexes_keys_selectors=indexes_keys_selectors)
def _execute_update_query_with_initialization_if_missing(
        self, key_name: str, key_value: Any, update_query_kwargs: dict,
        setters: List[DynamoDBMapObjectSetter]) -> Optional[Response]:
    """Run an update query, initializing missing map target paths and retrying once on failure.

    :param key_name: Name of the record key.
    :param key_value: Value of the record key.
    :param update_query_kwargs: The kwargs dict for the update query.
    :param setters: The map object setters whose target paths may need initialization.
    :return: The update query response, or None if it failed even after initialization.
    """
    response = self._execute_update_query(query_kwargs_dict=update_query_kwargs)
    if response is None:
        # If the response is None, it means that one of the path of the
        # target path has not been found and need to be initialized.
        for i_setter, current_setter in enumerate(setters):
            # NOTE(review): `success` and `i_setter` are never used — presumably
            # initialization failures are deliberately ignored; confirm.
            success: bool = self.initialize_all_elements_in_map_target(
                key_name=key_name, key_value=key_value,
                target_path_elements=current_setter.target_path_elements
            )
            print(message_with_vars(
                message="Initialized a field after a set/update multiple data elements in map request had failed.",
                vars_dict={"fieldTargetPathElements": current_setter.target_path_elements}
            ))
        # NOTE(review): reconstructed placement — the retry appears to run once
        # after all setters have been initialized; confirm against history.
        response = self._execute_update_query(query_kwargs_dict=update_query_kwargs)
    return response
def process_and_get_field_path_object_from_field_path(
        field_path_key: str, fields_switch: dict
) -> Tuple[Union[BaseField, Dict[str, BaseField]], bool]:
    """Resolve a field path to a field object, expanding multi-attributes selectors.

    When the path contains a multi-attributes selector, each attribute inside the
    selector is resolved individually and a dict of attribute name to BaseField is
    returned together with True. Otherwise the single resolved BaseField is
    returned together with False.

    :raises Exception: When a multi-attributes selector contains no attribute.
    """
    selector_matches: Optional[List[tuple]] = re.findall(
        pattern=MULTI_ATTRIBUTES_SELECTOR_REGEX_EXPRESSION, string=field_path_key)
    if selector_matches is not None and len(selector_matches) > 0:
        for selector_match in selector_matches:
            matched_selector_text = ''.join(selector_match)
            raw_attributes_selectors: str = selector_match[1]
            attribute_names = raw_attributes_selectors.replace(' ', '').split(',')

            resolved_attributes_fields: Dict[str, BaseField] = {}
            for attribute_name in attribute_names:
                rendered_attribute_path = field_path_key.replace(matched_selector_text, attribute_name)
                # if len(rendered_attribute_path) > 0 and rendered_attribute_path[0] == ".":
                #     # If the start of the field path is a multi selector, the replace will unfortunately add an
                #     # invalid point, which we will remove if we see that the first char of the field path is a point.
                #     rendered_attribute_path = rendered_attribute_path[1:]
                resolved_attributes_fields[attribute_name] = _get_field_object_from_field_path(
                    field_path_key=rendered_attribute_path, fields_switch=fields_switch)

            if len(resolved_attributes_fields) == 0:
                raise Exception(
                    message_with_vars(
                        message=
                        "Cannot use an attribute selector ( ) without specifying any attribute inside it.",
                        vars_dict={
                            'field_path_key': field_path_key,
                            'attributes_selectors_string': raw_attributes_selectors
                        }))
            # Only the first selector match is processed (same behavior as before).
            return resolved_attributes_fields, True
    return _get_field_object_from_field_path(
        field_path_key=field_path_key, fields_switch=fields_switch), False
def validate_data(
        value: Any, expected_value_type: Any,
        item_type_to_return_to: Optional[BaseItem] = None
) -> Tuple[Any, bool]:
    """Recursively validate value against expected_value_type.

    Invalid nested items are pruned from dicts/lists/sets (diagnostics printed);
    a container whose items were ALL pruned is rejected entirely.

    :param value: The data to validate (possibly nested).
    :param expected_value_type: A type, tuple/list of acceptable types, or Any.
    :param item_type_to_return_to: The item model carrying nested validation info.
    :return: (validated value or None, validity flag)
    """
    value_type = type(value)
    # We do not try_to_get_primitive_default_type_of_item here, because depending on the value_type, we might trigger different behaviors.
    # For example, a list or tuple will be considered as collection of multiple fields types that needs to be looked at individually.

    if expected_value_type == Any:
        # Any accepts everything as-is.
        return value, True

    if type(expected_value_type) in [list, tuple]:
        # Multiple acceptable types: the value must match at least one.
        has_found_match = False
        for acceptable_value_type in expected_value_type:
            if _types_match(type_to_check=value_type, expected_type=acceptable_value_type):
                has_found_match = True
                break
        if has_found_match is not True:
            vars_dict = {'value': value, 'valueType': value_type, 'expectedValueType': expected_value_type}
            if item_type_to_return_to is not None:
                vars_dict['itemExpectedTypeDatabasePath'] = item_type_to_return_to.database_path
            print(message_with_vars(
                message=f"Primitive value did not match any of the possible "
                        f"expected types. Value of None is being returned.",
                vars_dict=vars_dict
            ))
            return None, False
    else:
        if not _types_match(type_to_check=value_type, expected_type=expected_value_type):
            vars_dict = {'value': value, 'valueType': value_type, 'expectedValueType': expected_value_type}
            if item_type_to_return_to is not None:
                vars_dict['itemExpectedTypeDatabasePath'] = item_type_to_return_to.database_path
            print(message_with_vars(
                message=f"Primitive value did not match expected "
                        f"type. Value of None is being returned.",
                vars_dict=vars_dict
            ))
            return None, False

    if value_type == dict:
        value: dict
        # todo: fix a bug, where for some reasons, when calling the get_field function, if what
        # we get is a dict that has only key and one item, instead of returning the dict, we will return the value in the dict
        item_keys_to_pop: List[str] = []
        if item_type_to_return_to is not None:
            if (
                item_type_to_return_to.map_model is not None
                and not isinstance(item_type_to_return_to.map_model, DictModel)
            ):
                # Typed MapModel: validate each item against its declared field and
                # track which required fields were actually populated.
                populated_required_fields: List[BaseField] = []
                item_keys_to_pop: List[str] = []
                for key, item in value.items():
                    item_matching_validation_model_variable: Optional[BaseField] = getattr(item_type_to_return_to.map_model, key, None)
                    if item_matching_validation_model_variable is not None:
                        item, valid = validate_data(
                            value=item, item_type_to_return_to=item_matching_validation_model_variable,
                            expected_value_type=item_matching_validation_model_variable.field_type
                        )
                        if valid is True:
                            value[key] = item
                            if item_matching_validation_model_variable.required is True:
                                populated_required_fields.append(item_matching_validation_model_variable)
                        else:
                            item_keys_to_pop.append(key)
                    else:
                        item_keys_to_pop.append(key)
                        print(message_with_vars(
                            message=f"No map validator was found in a nested item of a dict. Item will be removed from data.",
                            vars_dict={"key": key, "item": item}
                        ))
                map_model_required_fields: Optional[List[BaseField]] = getattr(item_type_to_return_to.map_model, 'required_fields', None)
                if map_model_required_fields is None:
                    raise Exception("Missing required_fields")
                if len(map_model_required_fields) != len(populated_required_fields):
                    # At least one required field is absent: report exactly which paths.
                    missing_required_fields_database_paths: List[List[DatabasePathElement]] = []
                    for current_required_field in map_model_required_fields:
                        if current_required_field not in populated_required_fields:
                            missing_required_fields_database_paths.append(current_required_field.database_path)
                    print(message_with_vars(
                        message="Missing required fields on map element. Returning None and valid to False.",
                        vars_dict={"missingRequiredFieldsDatabasePaths": missing_required_fields_database_paths}
                    ))
                    return None, False
            else:
                # Untyped dict / DictModel: validate keys and item values individually.
                for key, item in value.items():
                    if item_type_to_return_to.key_expected_type is not None:
                        key_type = type(key)
                        if not _types_match(type_to_check=key_type, expected_type=item_type_to_return_to.key_expected_type):
                            print(message_with_vars(
                                message=f"Key of an item in a dict did not match expected key type. Item will be removed from data.",
                                vars_dict={"key": key, "item": item, "keyType": key_type,
                                           "expectedKeyType": item_type_to_return_to.key_expected_type}
                            ))
                            item_keys_to_pop.append(key)
                            continue
                    if item_type_to_return_to.items_excepted_type is not None:
                        if hasattr(item_type_to_return_to.items_excepted_type, '__bases__') and MapModel in item_type_to_return_to.items_excepted_type.__bases__:
                            # We check if the items_excepted_type contains the __bases__ attributes, because form values (like the Any value that is assigned both when
                            # using an untyped dict or when using Any in a typed Dict) will not contain the __bases__ attribute and will raise if trying to access it.
                            element_item_keys_to_pop: List[str] = []
                            item_type = type(item)
                            if not _types_match(type_to_check=item_type, expected_type=dict):
                                print(message_with_vars(
                                    message=f"Received data that should be set inside a nested MapModel "
                                            f"was not of type dict. Item will be removed from data.",
                                    vars_dict={"key": key, "item": item, "itemType": item_type}
                                ))
                                item_keys_to_pop.append(key)
                                continue
                            item: dict
                            item_matching_validation_model_variable: Optional[BaseField] = getattr(item_type_to_return_to.map_model, key, None)
                            if item_matching_validation_model_variable is not None:
                                for element_item_key, element_item_value in item.items():
                                    element_item_matching_validation_model_variable: Optional[BaseField] = getattr(
                                        item_matching_validation_model_variable, element_item_key, None
                                    )
                                    if element_item_matching_validation_model_variable is not None:
                                        element_item_value, valid = validate_data(
                                            value=element_item_value,
                                            item_type_to_return_to=element_item_matching_validation_model_variable,
                                            expected_value_type=element_item_matching_validation_model_variable.field_type,
                                        )
                                        if valid is True:
                                            item[element_item_key] = element_item_value
                                        else:
                                            if element_item_matching_validation_model_variable.required is not True:
                                                # Optional nested field: drop just that element.
                                                element_item_keys_to_pop.append(element_item_key)
                                            else:
                                                # Required nested field invalid: drop the whole item.
                                                item_keys_to_pop.append(key)
                                                break
                                    else:
                                        element_item_keys_to_pop.append(element_item_key)
                                        print(message_with_vars(
                                            message=f"No map validator was found in a nested item of a dict. Item will be removed from data.",
                                            vars_dict={"elementItemKey": key, "elementItemValue": element_item_value}
                                        ))
                            else:
                                print(message_with_vars(
                                    message=f"No map validator was found in a item of a dict. Item will be removed from data.",
                                    vars_dict={"itemKey": key, "itemValue": item}
                                ))
                            for element_item_key_to_pop in element_item_keys_to_pop:
                                item.pop(element_item_key_to_pop)
                        else:
                            if not _types_match(type_to_check=type(item), expected_type=item_type_to_return_to.items_excepted_type):
                                item_keys_to_pop.append(key)
                                print(message_with_vars(
                                    message=f"Value of nested item of dict did not match expected type. Item will be removed from data.",
                                    vars_dict={"item": item, "itemKey": key,
                                               "expectedItemValueType": item_type_to_return_to.items_excepted_type}
                                ))
                            else:
                                value[key] = item
        num_dict_items = len(value)
        if num_dict_items > 0 and (len(item_keys_to_pop) == num_dict_items):
            # Non-empty dict whose every item was rejected: reject the whole dict.
            print(message_with_vars(
                message="The value dict to validate was not empty, but all of its items have been "
                        "removed because they did not matched the model. Value of None is returned.",
                vars_dict={"value": value, "item_keys_to_pop": item_keys_to_pop}
            ))
            return None, False
        else:
            for item_key_to_pop in item_keys_to_pop:
                value.pop(item_key_to_pop)
            return value, True

    elif value_type == list:
        value: list
        if True:  # list_items_models is not None:
            # todo: add type checking fo list models
            # NOTE(review): assumes item_type_to_return_to is not None here — confirm callers.
            indexes_to_pop: List[int] = []
            for i, item in enumerate(value):
                if item_type_to_return_to.map_model is not None:
                    item, valid = validate_data(
                        value=item,
                        expected_value_type=item_type_to_return_to.map_model,
                    )
                    if valid is False:
                        indexes_to_pop.append(i)
                elif item_type_to_return_to.items_excepted_type is not None:
                    item, valid = validate_data(
                        value=item,
                        expected_value_type=item_type_to_return_to.items_excepted_type,
                    )
                    if valid is False:
                        indexes_to_pop.append(i)
                # If no map validator has been found, this means we have an untyped list. So, we will
                # not perform any data validation on the list items and consider all the items valid.
                """else:
                    indexes_to_pop.append(i)
                    print(message_with_vars(
                        message=f"No map validator was found in a nested item of a list. Value will be removed from data.",
                        vars_dict={"listValue": value, "item": item, "itemIndex": i}
                    ))"""
            # Pop from the end so earlier indexes stay valid while removing.
            indexes_to_pop.reverse()
            for index in indexes_to_pop:
                value.pop(index)

    elif value_type == set:
        value: set
        if item_type_to_return_to.items_excepted_type is not None:
            items_keys_values_to_remove = []
            for set_item in value:
                item_type = type(set_item)
                if not _types_match(type_to_check=item_type, expected_type=item_type_to_return_to.items_excepted_type):
                    items_keys_values_to_remove.append(set_item)
                    print(message_with_vars(
                        message=f"Value of item of set did not match expected type. Item will be removed from data.",
                        vars_dict={'item': set_item, 'itemType': item_type,
                                   'expectedItemValueType': item_type_to_return_to.items_excepted_type}
                    ))
            num_set_items = len(value)
            if num_set_items > 0 and (len(items_keys_values_to_remove) == num_set_items):
                # Non-empty set whose every item was rejected: reject the whole set.
                print(message_with_vars(
                    message="The value set to validate was not empty, but all of its items have been "
                            "removed because they did not matched the model. Value of None is returned.",
                    vars_dict={'value': value, 'itemsToRemove': items_keys_values_to_remove}
                ))
                return None, False
            else:
                for item_to_remove in items_keys_values_to_remove:
                    value.remove(item_to_remove)
                return value, True
        return value, True

    return value, True
    """
    # Even tough DynamoDB does not support float types, the conversion between floats to Decimal is being done in the DynamoDBCore functions
    elif value_type == float:
        # DynamoDB does not support float types. They must be converted to Decimal's.
        return value, True
    """
    return value, True
def validate_data(
        value: Any, expected_value_type: Any,
        item_type_to_return_to: Optional[BaseItem] = None) -> Tuple[Any, bool]:
    """Recursively validate value against expected_value_type (legacy variant).

    Floats are converted to Decimal; invalid nested dict items are pruned
    (diagnostics printed), and a dict whose items were ALL pruned is rejected.

    :param value: The data to validate (possibly nested).
    :param expected_value_type: A type, tuple/list of acceptable types, or Any.
    :param item_type_to_return_to: The item model carrying nested validation info.
    :return: (validated value or None, validity flag)
    """
    value_type = type(value)

    if expected_value_type == Any:
        # Any accepts everything; floats are still serialized to Decimal.
        return float_to_decimal_serializer(value), True

    if type(expected_value_type) in [list, tuple]:
        has_found_match = False
        for acceptable_value_type in expected_value_type:
            if _types_match(type_to_check=value_type, expected_type=acceptable_value_type):
                has_found_match = True
                break
        if has_found_match is not True:
            # NOTE(review): unconditionally dereferences item_type_to_return_to here
            # even though the parameter defaults to None — confirm callers always pass it.
            print(
                message_with_vars(
                    message=
                    f"Primitive value did not match any of the possible expected types. Value of None is being returned.",
                    vars_dict={
                        "itemExpectedTypeDatabasePath": item_type_to_return_to.database_path,
                        "value": value,
                        "valueType": value_type,
                        "acceptableExpectedValueTypes": expected_value_type
                    }))
            return None, False
    else:
        if not _types_match(type_to_check=value_type, expected_type=expected_value_type):
            print(
                message_with_vars(
                    message=
                    f"Primitive value did not match expected type. Value of None is being returned.",
                    vars_dict={
                        "itemExpectedTypeDatabasePath": item_type_to_return_to.database_path,
                        "value": value,
                        "valueType": value_type,
                        "expectedValueType": expected_value_type
                    }))
            return None, False

    if value_type == dict:
        value: dict
        # todo: fix a bug, where for some reasons, when calling the get_single_field_value_from_single_item function, if what
        # we get is a dict that has only key and one item, instead of returning the dict, we will return the value in the dict
        item_keys_to_pop: List[str] = list()
        if item_type_to_return_to is not None:
            if item_type_to_return_to.map_model is not None:
                # Typed MapModel: validate each item against its declared field and
                # track which required fields were actually populated.
                populated_required_fields: List[BaseField] = list()
                item_keys_to_pop: List[str] = list()
                for key, item in value.items():
                    item_matching_validation_model_variable: Optional[BaseField] = getattr(
                        item_type_to_return_to.map_model, key, None)
                    if item_matching_validation_model_variable is not None:
                        item, valid = validate_data(
                            value=item,
                            item_type_to_return_to=item_matching_validation_model_variable,
                            expected_value_type=item_matching_validation_model_variable.field_type,
                        )
                        if valid is True:
                            value[key] = item
                            if item_matching_validation_model_variable.required is True:
                                populated_required_fields.append(item_matching_validation_model_variable)
                        else:
                            item_keys_to_pop.append(key)
                    else:
                        item_keys_to_pop.append(key)
                        print(
                            message_with_vars(
                                message=
                                f"No map validator was found in a nested item of a dict. Item will be removed from data.",
                                vars_dict={"key": key, "item": item}))
                if len(item_type_to_return_to.map_model.required_fields) != len(populated_required_fields):
                    # At least one required field is absent: report exactly which paths.
                    missing_required_fields_database_paths: List[List[DatabasePathElement]] = list()
                    for current_required_field in item_type_to_return_to.map_model.required_fields:
                        if current_required_field not in populated_required_fields:
                            missing_required_fields_database_paths.append(current_required_field.database_path)
                    print(
                        message_with_vars(
                            message=
                            "Missing required fields on map element. Returning None and valid to False.",
                            vars_dict={
                                "missingRequiredFieldsDatabasePaths": missing_required_fields_database_paths
                            }))
                    return None, False
            else:
                # Untyped dict: validate keys and item values individually.
                for key, item in value.items():
                    if item_type_to_return_to.dict_key_expected_type is not None:
                        key_type = type(key)
                        if not _types_match(
                                type_to_check=key_type,
                                expected_type=item_type_to_return_to.dict_key_expected_type):
                            print(
                                message_with_vars(
                                    message=
                                    f"Key of an item in a dict did not match expected key type. Item will be removed from data.",
                                    vars_dict={
                                        "key": key, "item": item, "keyType": key_type,
                                        "expectedKeyType": item_type_to_return_to.dict_key_expected_type
                                    }))
                            item_keys_to_pop.append(key)
                            continue
                    if item_type_to_return_to.dict_items_excepted_type is not None:
                        if MapModel in item_type_to_return_to.dict_items_excepted_type.__bases__:
                            element_item_keys_to_pop: List[str] = list()
                            item_type = type(item)
                            if not _types_match(type_to_check=item_type, expected_type=dict):
                                print(
                                    message_with_vars(
                                        message=
                                        f"Received data that should be set inside a nested MapModel "
                                        f"was not of type dict. Item will be removed from data.",
                                        vars_dict={"key": key, "item": item, "itemType": item_type}))
                                item_keys_to_pop.append(key)
                                continue
                            item: dict
                            item_matching_validation_model_variable: Optional[BaseField] = getattr(
                                item_type_to_return_to.map_model, key, None)
                            if item_matching_validation_model_variable is not None:
                                for element_item_key, element_item_value in item.items():
                                    element_item_matching_validation_model_variable: Optional[BaseField] = getattr(
                                        item_matching_validation_model_variable, element_item_key, None)
                                    if element_item_matching_validation_model_variable is not None:
                                        element_item_value, valid = validate_data(
                                            value=element_item_value,
                                            item_type_to_return_to=element_item_matching_validation_model_variable,
                                            expected_value_type=element_item_matching_validation_model_variable.field_type,
                                        )
                                        if valid is True:
                                            item[element_item_key] = element_item_value
                                        else:
                                            if element_item_matching_validation_model_variable.required is not True:
                                                # Optional nested field: drop just that element.
                                                element_item_keys_to_pop.append(element_item_key)
                                            else:
                                                # Required nested field invalid: drop the whole item.
                                                item_keys_to_pop.append(key)
                                                break
                                    else:
                                        element_item_keys_to_pop.append(element_item_key)
                                        print(
                                            message_with_vars(
                                                message=
                                                f"No map validator was found in a nested item of a dict. Item will be removed from data.",
                                                vars_dict={
                                                    "elementItemKey": key,
                                                    "elementItemValue": element_item_value
                                                }))
                            else:
                                # NOTE(review): `element_item_key` is unbound on this branch
                                # (the inner loop never ran) — latent NameError; confirm.
                                print(
                                    message_with_vars(
                                        message=
                                        f"No map validator was found in a item of a dict. Item will be removed from data.",
                                        vars_dict={
                                            "itemKey": element_item_key,
                                            "itemValue": item
                                        }))
                            for element_item_key_to_pop in element_item_keys_to_pop:
                                item.pop(element_item_key_to_pop)
                        else:
                            if not _types_match(
                                    type_to_check=type(item),
                                    expected_type=item_type_to_return_to.dict_items_excepted_type):
                                item_keys_to_pop.append(key)
                                print(
                                    message_with_vars(
                                        message=
                                        f"Value of nested item of dict did not match expected type. Item will be removed from data.",
                                        vars_dict={
                                            "item": item, "itemKey": key,
                                            "expectedItemValueType": item_type_to_return_to.dict_items_excepted_type
                                        }))
                            else:
                                value[key] = item
        if len(value) > 0 and (len(item_keys_to_pop) == len(value)):
            # Non-empty dict whose every item was rejected: reject the whole dict.
            print(
                message_with_vars(
                    message=
                    "The value dict to validate was not empty, but all of its items have been "
                    "removed because they did not matched the model. Value of None is returned.",
                    vars_dict={"value": value, "item_keys_to_pop": item_keys_to_pop}))
            return None, False
        else:
            for item_key_to_pop in item_keys_to_pop:
                value.pop(item_key_to_pop)
            return value, True

    elif value_type == list:
        value: list
        if True:  # list_items_models is not None:
            # todo: add type checking fo list models
            indexes_to_pop: List[int] = list()
            for i, item in enumerate(value):
                # NOTE(review): `key` is unbound here when the dict branch did not run —
                # latent NameError in the list branch; confirm against later versions.
                matching_validation_model_variable: Optional[BaseField] = getattr(
                    item_type_to_return_to.map_model, key, None)
                if matching_validation_model_variable is not None:
                    item, valid = validate_data(
                        value=item,
                        expected_value_type=matching_validation_model_variable.field_type)
                    if item is None:
                        indexes_to_pop.append(i)
                else:
                    indexes_to_pop.append(i)
                    print(
                        message_with_vars(
                            message=
                            f"No map validator was found in a nested item of a list. Value will be removed from data.",
                            vars_dict={"listValue": value, "item": item, "itemIndex": i}))

    elif value_type == float:
        # DynamoDB does not support float types. They must be converted to Decimal's.
        return float_to_decimal(float_number=value), True

    return value, True
def __init__(self, field_type: Any, required: Optional[bool] = False, not_modifiable: Optional[bool] = False,
             custom_field_name: Optional[str] = None, custom_default_value: Optional[Any] = None,
             key_name: Optional[str] = None, max_nested_depth: Optional[int] = 32):
    """Construct a field definition.

    :param field_type: Expected Python type(s) of the field value.
    :param required: Whether the field must be populated; when False, NoneType
        is added to the acceptable types.
    :param not_modifiable: Not implemented — raises when True.
    :param custom_field_name: Optional explicit field name (validated for restricted chars).
    :param custom_default_value: Default value forwarded to the parent item.
    :param key_name: Optional key name; only allowed on dict/set/list-like field types.
    :param max_nested_depth: Maximum nesting depth; DynamoDB caps this at 32.
    """
    super().__init__(field_type=field_type, custom_default_value=custom_default_value)
    self._field_name = custom_field_name
    if self._field_name is not None:
        _raise_if_field_name_is_invalid(field_name=self._field_name)
        # If a custom_field_name has been specified by the user, it will be set as the field_name
        # (which will then be validated for invalid characters). The field_name will not be able to be
        # modified a second time (otherwise the field_name property setter will cause an exception),
        # so the process_item function of the base_table.py will see that the field_name has already been
        # initialized, and will not try to initialize it from the variable key_name from the class signature.
    self._required = required
    self._key_name = None

    if self._required is not True:
        # If the field is not required, we add the NoneType to the field_type
        if isinstance(self._field_type, list):
            # If it's a list, we just a need to append the NoneType
            self._field_type.append(NoneType)
        elif isinstance(self._field_type, tuple):
            # If it's a tuple, we combine the existing tuple with a tuple containing the NoneType
            self._field_type = self._field_type + (NoneType, )
        else:
            # And it it's any other type, we create a new tuple with the field_type and the NoneType
            self._field_type = (self._field_type, NoneType)

    if max_nested_depth is not None and max_nested_depth > 32:
        # DynamoDB's own attribute-nesting limit, not a StructNoSQL restriction.
        raise Exception(
            f"DynamoDB support a maximum depth of nested of items of 32. This is not imposed by StructNoSQL but a platform limitation.\n"
            f"See : https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html#limits-attributes"
        )
    self._max_nested = max_nested_depth

    if not_modifiable is True:
        raise Exception(f"Not modifiable not yet implemented")

    if key_name is not None:
        if field_type in [dict, set, list
                          ] or type(field_type) is _GenericAlias:
            self._key_name = key_name
        elif isinstance(field_type, (tuple, list)):
            raise Exception(
                f"Multiple dictionaries are not yet supported.")
            # NOTE(review): everything below this raise is unreachable dead code —
            # presumably a planned multi-dict implementation; confirm before removing.
            all_items_are_dict = True
            for item in field_type:
                item_type = type(item)
                if not isinstance(item, (dict, _GenericAlias)):
                    all_items_are_dict = False
                    break
            if all_items_are_dict is True:
                self._key_name = key_name
            else:
                # NOTE(review): `name` is undefined in this constructor (the parameter is
                # custom_field_name) — would raise NameError if this dead code ever ran.
                raise Exception(
                    message_with_vars(
                        message=
                        "key_name cannot be set on a field that is a tuple or list that does not exclusively contains dict or Dict items",
                        vars_dict={
                            'fieldName': name,
                            'fieldType': field_type,
                            'keyName': key_name
                        }))
        else:
            raise Exception(
                message_with_vars(
                    message=
                    "key_name cannot be set on a field that is not of type dict, Dict, list, List, set or Set",
                    vars_dict={
                        'fieldName': self.field_name,
                        'fieldType': field_type,
                        'keyName': key_name
                    }))
def __init__(self, field_type: Optional[type] = Any, custom_default_value: Optional[Any] = None):
    """Initialize the base item and normalize its field_type.

    Typing aliases (Dict, Set, List) are unpacked into their primitive container
    type plus the expected key/item types; MapModel subclasses are stored as the
    map_model and treated as dict; untyped dict/list get Any item types; an
    untyped set raises UsageOfUntypedSetException.

    :param field_type: A type, list/tuple of types, typing alias, or MapModel subclass.
    :param custom_default_value: Optional default value for the item.
    """
    # todo: add a file_url field_type
    self._value = None
    self._query = None
    self.map_key_expected_type: Optional[type] = None
    self.map_model: Optional[MapModel or type] = None
    self._custom_default_value = custom_default_value
    self._field_type = field_type
    self._default_field_type = field_type
    if isinstance(self._field_type, (list, tuple)):
        if not len(self._field_type) > 0:
            raise Exception(
                "At least one field_type must be specified in a list or tuple of field types"
            )
        # The first listed type is considered the default/primary type of the field.
        self._default_field_type = self._field_type[0]
    elif isinstance(self._field_type, _GenericAlias):
        # NOTE(review): this reads typing-module internals ('_name', '__args__')
        # directly from the alias __dict__; behavior depends on the typing
        # implementation of the targeted Python versions.
        alias_variable_name: Optional[str] = self._field_type.__dict__.get('_name', None)
        if alias_variable_name is not None:
            alias_args: Optional[Tuple] = self._field_type.__dict__.get('__args__', None)
            if alias_variable_name == "Dict":
                self._field_type = dict
                self._default_field_type = dict
                self._key_expected_type = alias_args[0]
                if self._key_expected_type not in ACCEPTABLE_KEY_TYPES:
                    raise Exception(
                        message_with_vars(
                            message="Key in a Dict field was not found in the acceptable key types",
                            vars_dict={
                                '_key_expected_type': self._key_expected_type,
                                'ACCEPTABLE_KEY_TYPES': ACCEPTABLE_KEY_TYPES
                            }))
                self._items_excepted_type = alias_args[1]
                if isinstance(self._items_excepted_type, _GenericAlias):
                    # Nested alias (e.g. Dict[str, Dict[...]]): convert to a model.
                    self._items_excepted_type = _alias_to_model(alias=self._items_excepted_type)
            elif alias_variable_name == "Set":
                self._field_type = set
                self._default_field_type = set
                self._items_excepted_type = alias_args[0]
                if isinstance(self._items_excepted_type, _GenericAlias):
                    self._items_excepted_type = _alias_to_model(alias=self._items_excepted_type)
                # todo: rename the _items_excepted_type variable
            elif alias_variable_name == "List":
                self._field_type = list
                self._default_field_type = list
                self._items_excepted_type = alias_args[0]
                if isinstance(self._items_excepted_type, _GenericAlias):
                    self._items_excepted_type = _alias_to_model(alias=self._items_excepted_type)
                # todo: rename the _items_excepted_type variable
            else:
                raise Exception(f"Unsupported GenericAlias : {alias_variable_name}")
    elif MapModel in getattr(self._field_type, '__mro__', ()):
        # A MapModel subclass: remember the model and store the field as a dict.
        self.map_model = self._field_type
        self._field_type = dict
        self._default_field_type = dict
    elif self._field_type == dict:
        # Handle an untyped dict
        self._items_excepted_type = Any
        self._key_expected_type = Any
    elif self._field_type == list:
        # Handle an untyped list
        self._items_excepted_type = Any
    elif self._field_type == set:
        # Raise on an untyped set
        raise UsageOfUntypedSetException()
def process_item(self, item_key_name: Optional[str], class_type: Optional[type], variable_item: Any,
                 current_field_path: str,
                 current_path_elements: Optional[List[DatabasePathElement]] = None,
                 is_nested: bool = False) -> list:
    """Register a single model attribute (field, dict model or map model) in the table's fields switch.

    Walks the item, assigns its database path, records it in
    self.table.fields_switch, and recurses into map models and typed
    dict/list/set item types (including "{i}"-keyed nested fields, which are
    expanded up to the field's max_nested depth with a hard cap of 32 path
    elements).

    :param item_key_name: Attribute name to use as the field_name if not already set.
    :param class_type: The owning class type; required when items use ActiveSelf.
    :param variable_item: The DictModel, MapModel subclass, or BaseField to process.
    :param current_field_path: Dotted field path accumulated so far.
    :param current_path_elements: DatabasePathElement chain accumulated so far.
    :param is_nested: True when called while expanding a nested "{i}" field.
    :return: The list of fields marked required=True that were encountered.
    """
    required_fields = []
    if current_path_elements is None:
        current_path_elements = []
    field_is_valid: bool = False
    try:
        if isinstance(variable_item, DictModel):
            variable_item: DictModel
            """new_database_path_element = DatabasePathElement(
                element_key=variable_item.key_name,
                default_type=variable_item.item_type
            )"""
            variable_item._database_path = [*current_path_elements]  #, new_database_path_element]
            variable_item._table = self.table
            """if variable_item.required is True:
                required_fields.append(variable_item)"""
            current_field_path += ("" if len(current_field_path) == 0 else ".") + "{{" + variable_item.key_name + "}}"
            field_is_valid = self.table.fields_switch.set(key=current_field_path, item=copy(variable_item))
        elif MapModel in getattr(variable_item, '__mro__', ()):
            # A MapModel subclass is only indexed when it already has a parent path.
            if len(current_path_elements) > 0:
                self.assign_internal_mapping_from_class(
                    class_type=variable_item,
                    nested_field_path=current_field_path,
                    current_path_elements=[*current_path_elements])
        elif isinstance(variable_item, BaseField):
            variable_item: BaseField
            if item_key_name is not None and variable_item.field_name is None:
                variable_item.field_name = item_key_name
            new_database_path_element = DatabasePathElement(
                element_key=variable_item.field_name,
                default_type=variable_item.default_field_type,
                custom_default_value=variable_item.custom_default_value)
            # A copy of the path list is built so the static _database_path of other
            # fields is never shared/mutated across invocations.
            variable_item._database_path = [*current_path_elements, new_database_path_element]
            variable_item._table = self.table
            if variable_item.required is True:
                required_fields.append(variable_item)
            current_field_path += f"{variable_item.field_name}" if len(current_field_path) == 0 else f".{variable_item.field_name}"
            field_is_valid = self.table.fields_switch.set(key=current_field_path, item=copy(variable_item))
            if variable_item.key_name is not None:
                if "{i}" not in variable_item.key_name:
                    # The current_field_path concat is being handled lower in the code for the nested fields
                    current_field_path += ".{{" + variable_item.key_name + "}}"
            if field_is_valid is True:
                if variable_item.map_model is not None:
                    self.assign_internal_mapping_from_class(
                        class_type=variable_item.map_model,
                        nested_field_path=current_field_path,
                        current_path_elements=[*variable_item.database_path])
                if variable_item.items_excepted_type is not None:
                    from StructNoSQL import ActiveSelf
                    if variable_item.items_excepted_type is ActiveSelf:
                        if class_type is None:
                            raise Exception(
                                message_with_vars(
                                    message="Cannot use the ActiveSelf attribute as the items_excepted_type when the class type is None",
                                    vars_dict={'field_name': variable_item.field_name}))
                        # ActiveSelf resolves the item type to the owning class itself.
                        variable_item._items_excepted_type = class_type
                    item_default_type = try_to_get_primitive_default_type_of_item(
                        item_type=variable_item.items_excepted_type)
                    # NOTE: shadows the item_key_name parameter from here on.
                    item_key_name = make_dict_key_var_name(variable_item.key_name)
                    if "{i}" in variable_item.key_name:
                        if is_nested is not True:
                            current_nested_field_path = "" if current_field_path is None else current_field_path
                            current_nested_database_path = [*variable_item.database_path]
                            for i in range(variable_item.max_nested):
                                # Hard cap at 32 path elements (DynamoDB depth limit).
                                if len(current_nested_database_path) > 32:
                                    print(message_with_vars(
                                        message="Imposed a max nested database depth on field missing or with a too high nested depth limit.",
                                        vars_dict={
                                            'current_field_path': current_field_path,
                                            'field_name': variable_item.field_name,
                                            'imposedMaxNestedDepth': i
                                        }))
                                    break
                                else:
                                    nested_variable_item = variable_item.copy()
                                    item_rendered_key_name: str = nested_variable_item.key_name.replace("{i}", f"{i}")
                                    nested_variable_item._database_path = [*current_nested_database_path]
                                    nested_variable_item._key_name = item_rendered_key_name
                                    # We create a copy of the variable_item upon which we render the key_name and add
                                    # the appropriate database_path_elements into to prepare the creation of the MapItem.
                                    map_item = MapItem(
                                        parent_field=nested_variable_item,
                                        field_type=nested_variable_item.default_field_type,
                                        model_type=nested_variable_item.items_excepted_type)
                                    # The MapItem will retrieve the key_name of its parent_field when initialized.
                                    # Hence, it is important to do the modifications on the nested_variable_item
                                    # before the initialization of the MapItem.
                                    if i > 0:
                                        current_nested_field_path += f".{variable_item.field_name}"
                                    current_nested_field_path += ".{{" + map_item.key_name + "}}"
                                    current_nested_database_path.append(
                                        DatabasePathElement(
                                            element_key=make_dict_key_var_name(map_item.key_name),
                                            default_type=nested_variable_item.default_field_type,
                                            custom_default_value=nested_variable_item.custom_default_value))
                                    field_is_valid = self.table.fields_switch.set(
                                        key=current_nested_field_path, item=map_item)
                                    if field_is_valid is True:
                                        if variable_item.items_excepted_type not in PRIMITIVE_TYPES:
                                            self.assign_internal_mapping_from_class(
                                                class_type=variable_item.items_excepted_type,
                                                nested_field_path=current_nested_field_path,
                                                current_path_elements=[*current_nested_database_path],
                                                is_nested=True)
                                    current_nested_database_path.append(
                                        DatabasePathElement(
                                            element_key=nested_variable_item.field_name,
                                            default_type=nested_variable_item.default_field_type,
                                            custom_default_value=nested_variable_item.custom_default_value))
                    else:
                        map_item = MapItem(
                            parent_field=variable_item,
                            field_type=item_default_type,
                            model_type=variable_item.items_excepted_type)
                        field_is_valid = self.table.fields_switch.set(current_field_path, map_item)
                        if field_is_valid is True:
                            items_excepted_type = variable_item.items_excepted_type
                            if items_excepted_type not in PRIMITIVE_TYPES:
                                new_database_dict_item_path_element = DatabasePathElement(
                                    element_key=item_key_name, default_type=item_default_type)
                                current_path_elements = [*variable_item.database_path, new_database_dict_item_path_element]
                                if isinstance(items_excepted_type, DictModel):
                                    if items_excepted_type.key_name is None:
                                        # If the key_name of a DictModel is not defined (for example, when a nested typed Dict is converted
                                        # to a DictModel) we set its key to the key of its parent plus the child keyword. So, a parent key
                                        # of itemKey will give itemKeyChild, and a parent of itemKeyChild will give itemKeyChildChild.
                                        items_excepted_type.key_name = f"{variable_item.key_name}Child"
                                self.process_item(
                                    class_type=None, item_key_name=None,
                                    variable_item=variable_item.items_excepted_type,
                                    current_field_path=current_field_path,
                                    current_path_elements=current_path_elements)
                                self.assign_internal_mapping_from_class(
                                    class_type=variable_item.items_excepted_type,
                                    nested_field_path=current_field_path,
                                    current_path_elements=current_path_elements)
    except Exception as e:
        # NOTE(review): broad catch that only prints — any indexing failure is
        # silently swallowed and the field is simply not registered.
        print(e)
    return required_fields
def assign_internal_mapping_from_class(
        self, class_instance: Optional[Any] = None, class_type: Optional[type] = None,
        current_path_elements: Optional[List[DatabasePathElement]] = None,
        nested_field_path: Optional[str] = None, is_nested: Optional[bool] = False):
    """Index every BaseField declared on a model class (including inherited ones).

    Collects class attributes across the full MRO, merges in any dict returned
    by a __setup__ classmethod (explicit setup values win over class attributes),
    runs process_item on each BaseField, and stores the collected required
    fields on the class as the 'required_fields' attribute.

    :param class_instance: Instance whose class is indexed (used when class_type is None).
    :param class_type: The model class to index; one of class_type/class_instance is mandatory.
    :param current_path_elements: DatabasePathElement chain leading to this class, if nested.
    :param nested_field_path: Dotted field path leading to this class, if nested.
    :param is_nested: Forwarded to process_item for "{i}"-keyed nested expansion.
    :raises Exception: When neither class_type nor class_instance is provided.
    """
    if class_type is None:
        if class_instance is not None:
            class_type = class_instance.__class__
        else:
            raise Exception(
                message_with_vars(
                    message="class_type or class_instance args must be passed "
                            "to the assign_internal_mapping_from_class function"))
    # todo: re-implement some kind of processed class types to avoid initializing
    #  multiple times the same class when we have a nested class ?
    if class_type in self.table.processed_class_types:
        pass
        # return None
    else:
        pass
    deep_class_variables: dict = {}
    component_classes: Optional[List[type]] = getattr(class_type, '__mro__', None)
    # Instead of just retrieving the __dict__ of the current class_type, we retrieve the __dict__'s of all the
    # classes in the __mro__ of the class_type (hence, the class type itself, and all of the types it inherited).
    # If we did not do that, fields inherited from a parent class would not be detected and not be indexed.
    if component_classes is not None:
        for component_class in component_classes:
            deep_class_variables.update(component_class.__dict__)
    setup_function: Optional[callable] = deep_class_variables.get('__setup__', None)
    if setup_function is not None:
        custom_setup_deep_class_variables: dict = class_type.__setup__()
        if len(custom_setup_deep_class_variables) > 0:
            # The deep_class_variables gotten from calling the __dict__ attribute is a mappingproxy, which cannot be modified.
            # In order to combine the custom_setup_deep_class_variables and the deep_class_variables variables we will iterate
            # over all the deep_class_variables attributes, add them to the dict created by the __setup__ function (only if
            # they are not found in the custom_setup_deep_class_variables dict, since the custom setup overrides any default
            # class attribute), and assign the deep_class_variables variable to our newly created and setup dict.
            for key, item in deep_class_variables.items():
                if key not in custom_setup_deep_class_variables:
                    custom_setup_deep_class_variables[key] = item
            deep_class_variables = custom_setup_deep_class_variables
    required_fields: List[BaseField] = []
    for variable_key_name, variable_item in deep_class_variables.items():
        if isinstance(variable_item, BaseField):
            current_field_path = "" if nested_field_path is None else nested_field_path
            required_fields.extend(
                self.process_item(
                    class_type=class_type,
                    item_key_name=variable_key_name,
                    variable_item=variable_item,
                    current_field_path=current_field_path,
                    current_path_elements=current_path_elements,
                    is_nested=is_nested))
    # Stored on the class so later lookups get a live reference, not a copy.
    setattr(class_type, 'required_fields', required_fields)
def set_update_multiple_data_elements_to_map(self, key_name: str, key_value: Any, setters: List[DynamoDBMapObjectSetter]) -> Optional[Response]:
    """Apply multiple setters to a record with a single UpdateItem query, splitting when too large.

    Builds a SET update expression from the setters in order. When adding a
    setter would push the (approximate, getsizeof-based) expression size past
    EXPRESSION_MAX_BYTES_SIZE, the expression is executed with the setters
    consumed so far and the remainder is handled by a recursive call.

    :param key_name: Name of the table's primary key attribute.
    :param key_value: Value of the primary key for the record to update.
    :param setters: Setters to apply; the list is NOT mutated by this function.
    :return: The Response of the last executed query, or None when setters is empty.
    """
    if not len(setters) > 0:
        # If we tried to run the query with no object setter,
        # it would crash when executed. So we return None.
        return None
    update_query_kwargs = {
        "TableName": self.table_name,
        "Key": {key_name: key_value},
        "ReturnValues": "UPDATED_NEW"
    }
    update_expression = "SET "
    expression_attribute_names_dict = dict()
    expression_attribute_values_dict = dict()

    from sys import getsizeof
    consumed_setters: List[DynamoDBMapObjectSetter] = list()

    for i_setter, current_setter in enumerate(setters):
        current_setter_update_expression = ""
        current_setter_attribute_names = dict()
        current_setter_attribute_values = dict()
        for i_path, current_path_element in enumerate(current_setter.target_path_elements):
            # Each path segment gets a unique placeholder so reserved words are safe.
            current_path_key = f"#setter{i_setter}_pathKey{i_path}"
            current_setter_update_expression += current_path_key
            current_setter_attribute_names[current_path_key] = current_path_element.element_key
            if i_path + 1 < len(current_setter.target_path_elements):
                current_setter_update_expression += "."
            else:
                current_setter_update_expression += f" = :item{i_setter}"
                current_setter_attribute_values[f":item{i_setter}"] = current_setter.value_to_set

        # NOTE(review): getsizeof measures Python object overhead, not the UTF-8
        # byte length DynamoDB counts; it over-estimates, which only makes the
        # split happen earlier than strictly necessary (safe direction).
        complete_update_expression_bytes_size = getsizeof(update_expression)
        current_setter_update_expression_bytes_size = getsizeof(current_setter_update_expression)
        update_expression_bytes_size_if_setter_is_added = (
            complete_update_expression_bytes_size + current_setter_update_expression_bytes_size
        )
        if update_expression_bytes_size_if_setter_is_added < EXPRESSION_MAX_BYTES_SIZE:
            if i_setter > 0:
                update_expression += ", "
            update_expression += current_setter_update_expression
            expression_attribute_names_dict = {**expression_attribute_names_dict, **current_setter_attribute_names}
            expression_attribute_values_dict = {**expression_attribute_values_dict, **current_setter_attribute_values}
            consumed_setters.append(current_setter)
        else:
            print(message_with_vars(
                message="Update operation expression size has reached over 4kb. "
                        "The operation will be divided in a secondary operation (which could also be divided)",
                vars_dict={
                    'key_name': key_name, 'key_value': key_value, 'setters': setters,
                    'update_expression': update_expression,
                    'current_setter_update_expression': current_setter_update_expression,
                    'current_setter_attribute_names': current_setter_attribute_names,
                    'current_setter_attribute_values': current_setter_attribute_values
                }
            ))
            break

    update_query_kwargs["UpdateExpression"] = update_expression
    update_query_kwargs["ExpressionAttributeValues"] = expression_attribute_values_dict
    if len(expression_attribute_names_dict) > 0:
        update_query_kwargs["ExpressionAttributeNames"] = expression_attribute_names_dict

    response = self._execute_update_query_with_initialization_if_missing(
        key_name=key_name, key_value=key_value,
        update_query_kwargs=update_query_kwargs, setters=consumed_setters
    )
    if len(consumed_setters) == len(setters):
        return response
    # Setters are consumed strictly in order, so the unconsumed remainder is the
    # tail of the list. Slicing (instead of the previous setters.remove() loop)
    # avoids mutating the caller's list as a side effect.
    remaining_setters = setters[len(consumed_setters):]
    return self.set_update_multiple_data_elements_to_map(
        key_name=key_name, key_value=key_value, setters=remaining_setters)
def assign_internal_mapping_from_class(table: BaseTable, class_instance: Optional[Any] = None, class_type: Optional[Any] = None,
                                       nested_field_path: Optional[str] = None,
                                       current_path_elements: Optional[List[DatabasePathElement]] = None,
                                       is_nested: Optional[bool] = False):
    """Index the fields of a model class into a table's fields switch and return a field mapping.

    Module-level counterpart of the method of the same name: walks the class
    __dict__ (merged with any __setup__() output), registers MapField and
    BaseField attributes with their database paths, expands "{i}"-keyed nested
    fields up to max_nested, recurses into non-primitive item types, and stores
    the required fields on the class.

    :param table: The BaseTable whose fields_switch receives the indexed fields.
    :param class_instance: Instance whose class is indexed (used when class_type is None).
    :param class_type: The model class to index; one of class_type/class_instance is mandatory.
    :param nested_field_path: Dotted field path leading to this class, if nested.
    :param current_path_elements: DatabasePathElement chain leading to this class, if nested.
    :param is_nested: True when called while expanding a nested "{i}" field.
    :return: Dict mapping field names/keys to nested mappings (or "" for leaf fields).
    :raises Exception: When neither class_type nor class_instance is provided.
    """
    if current_path_elements is None:
        current_path_elements = list()
    output_mapping = dict()

    if class_type is None:
        if class_instance is not None:
            class_type = class_instance.__class__
        else:
            raise Exception(message_with_vars(
                message="class_type or class_instance args must be passed "
                        "to the assign_internal_mapping_from_class function"
            ))

    # todo: re-implement some kind of processed class types to avoid initializing
    #  multiple times the same class when we have a nested class ?
    if class_type in table.processed_class_types:
        pass
        # return None
    else:
        pass
        # table.processed_class_types.update({class_type})

    class_variables = class_type.__dict__
    required_fields = list()
    setup_function: Optional[callable] = class_variables.get('__setup__', None)
    if setup_function is not None:
        custom_setup_class_variables: dict = class_type.__setup__()
        if len(custom_setup_class_variables) > 0:
            # The class_variables gotten from calling the __dict__ attribute is a mappingproxy, which cannot be modified.
            # In order to combine the custom_setup_class_variables and the class_variables variables we will iterate
            # over all the class_variables attributes, add them to the dict created by the __setup__ function (only if
            # they are not found in the custom_setup_class_variables dict, since the custom setup overrides any default
            # class attribute), and assign the class_variables variable to our newly created and setup dict.
            for key, item in class_variables.items():
                if key not in custom_setup_class_variables:
                    custom_setup_class_variables[key] = item
            class_variables = custom_setup_class_variables

    for variable_key, variable_item in class_variables.items():
        current_field_path = "" if nested_field_path is None else f"{nested_field_path}"
        try:
            if isinstance(variable_item, MapField):
                variable_item: MapField
                new_database_path_element = DatabasePathElement(
                    element_key=variable_item.field_name,
                    default_type=variable_item.field_type,
                    custom_default_value=variable_item.custom_default_value
                )
                variable_item._database_path = [*current_path_elements, new_database_path_element]
                variable_item._table = table
                if variable_item.required is True:
                    required_fields.append(variable_item)
                current_field_path += f"{variable_item.field_name}" if len(current_field_path) == 0 else f".{variable_item.field_name}"
                field_is_valid = table.fields_switch.set(key=current_field_path, item=copy(variable_item))
                if field_is_valid is True:
                    # Recurse into the map's model to index its own fields.
                    output_mapping[variable_item.field_name] = assign_internal_mapping_from_class(
                        table=table, class_type=variable_item.map_model,
                        nested_field_path=current_field_path,
                        current_path_elements=[*variable_item.database_path]
                    )
            elif isinstance(variable_item, BaseField):
                variable_item: BaseField
                new_database_path_element = DatabasePathElement(
                    element_key=variable_item.field_name,
                    default_type=variable_item.default_field_type,
                    custom_default_value=variable_item.custom_default_value
                )
                variable_item._database_path = [*current_path_elements, new_database_path_element]
                variable_item._table = table
                output_mapping[variable_key] = ""
                if variable_item.required is True:
                    required_fields.append(variable_item)
                current_field_path += f"{variable_item.field_name}" if len(current_field_path) == 0 else f".{variable_item.field_name}"
                field_is_valid = table.fields_switch.set(key=current_field_path, item=copy(variable_item))
                if field_is_valid is True:
                    if variable_item.dict_items_excepted_type is not None:
                        from StructNoSQL import ActiveSelf
                        if variable_item.dict_items_excepted_type is ActiveSelf:
                            # ActiveSelf resolves the item type to the owning class itself.
                            variable_item._dict_items_excepted_type = class_type
                        item_default_type = try_to_get_primitive_default_type_of_item(item_type=variable_item.dict_items_excepted_type)
                        item_key_name = make_dict_key_var_name(key_name=variable_item.key_name)
                        if "{i}" in variable_item.key_name:
                            if is_nested is not True:
                                current_nested_field_path = "" if nested_field_path is None else f"{nested_field_path}"
                                current_nested_database_path = [*variable_item.database_path]
                                # NOTE(review): unlike the method version of this routine, this
                                # loop has no guard capping the path at 32 elements — confirm
                                # max_nested alone keeps it within DynamoDB's depth limit.
                                for i in range(variable_item.max_nested):
                                    nested_variable_item = variable_item.copy()
                                    nested_variable_item._database_path = [*current_nested_database_path]
                                    item_rendered_key_name = nested_variable_item.key_name.replace("{i}", f"{i}")
                                    map_item = MapItem(
                                        parent_field=nested_variable_item,
                                        field_type=nested_variable_item.default_field_type,
                                        model_type=nested_variable_item.dict_items_excepted_type
                                    )
                                    current_nested_field_path += f".{variable_item.field_name}"
                                    current_nested_field_path += ".{{" + item_rendered_key_name + "}}"
                                    current_nested_database_path.append(DatabasePathElement(
                                        element_key=make_dict_key_var_name(item_rendered_key_name),
                                        default_type=nested_variable_item.default_field_type,
                                        custom_default_value=nested_variable_item.custom_default_value
                                    ))
                                    field_is_valid = table.fields_switch.set(key=current_nested_field_path, item=map_item)
                                    if field_is_valid is True:
                                        if variable_item.dict_items_excepted_type not in PRIMITIVE_TYPES:
                                            output_mapping[item_key_name] = assign_internal_mapping_from_class(
                                                table=table, class_type=variable_item.dict_items_excepted_type,
                                                nested_field_path=current_nested_field_path,
                                                current_path_elements=[*current_nested_database_path],
                                                is_nested=True
                                            )
                                    current_nested_database_path.append(DatabasePathElement(
                                        element_key=nested_variable_item.field_name,
                                        default_type=nested_variable_item.default_field_type,
                                        custom_default_value=nested_variable_item.custom_default_value
                                    ))
                        else:
                            current_field_path += ".{{" + variable_item.key_name + "}}"
                            map_item = MapItem(
                                parent_field=variable_item,
                                field_type=item_default_type,
                                model_type=variable_item.dict_items_excepted_type
                            )
                            field_is_valid = table.fields_switch.set(current_field_path, map_item)
                            if field_is_valid is True:
                                if variable_item.dict_items_excepted_type not in PRIMITIVE_TYPES:
                                    new_database_dict_item_path_element = DatabasePathElement(element_key=item_key_name, default_type=item_default_type)
                                    output_mapping[item_key_name] = assign_internal_mapping_from_class(
                                        table=table, class_type=variable_item.dict_items_excepted_type,
                                        nested_field_path=current_field_path,
                                        current_path_elements=[*variable_item.database_path, new_database_dict_item_path_element]
                                    )
        except Exception as e:
            # NOTE(review): broad catch that only prints — indexing failures for a
            # single attribute are silently swallowed.
            print(e)

    setattr(class_type, "required_fields", required_fields)
    # We need to set the attribute, because when we got the required_fields with the get_attr
    # function, we did not get a reference to the attribute, but a copy of the attribute value.

    return output_mapping