def test_nested_mapper(e, m, r):
    """Set mapper as a field of another mapper.

    Entities could contains nested entities.  Mappers of nested
    entities should be expressed as nested mappers in the config.

    This code should return a list of `Message` instances.  Each
    `Message` instance should have `User` instance as its attribute.
    """
    message_mapper = Mapper(
        e.Message,
        m.MessageModel,
        {"primary_key": "id", "user": Mapper({"primary_key": "id"})},
    )
    load_messages = r.get("load_messages", message_mapper, e.Message)
    messages = load_messages()
    assert isinstance(messages, list)
    first, second = messages
    for message in (first, second):
        assert isinstance(message, e.Message)
        assert isinstance(message.user, e.User)
def test_config_type_validation(e, m):
    """Config argument should be a dict."""
    expected = ""
    # Both a config-only call and a full three-argument call must reject
    # a non-dict config with the same error message.
    for build in (
        lambda: Mapper(object()),
        lambda: Mapper(e.User, m.UserModel, object()),
    ):
        with pytest.raises(MapperError) as exc_info:
            build()
        assert str(exc_info.value) == expected
def test_evaluated_field(e, m, r):
    """Evaluate fields which are not declared in the data source.

    Evaluated marker should be interpreted as a reason to ignore
    absence of the field directly on the data source model.  Field
    with exactly this name will appears on the collection.
    """
    total_mapper = Mapper(
        e.TotalMessage,
        m.MessageModel,
        {"primary_key": "id", "total": Evaluated()},
    )
    load_messages = r.get(
        "load_total_messages", total_mapper, e.TotalMessage, "total"
    )
    messages = load_messages()
    assert isinstance(messages, list)
    first, second = messages
    for message in (first, second):
        assert isinstance(message, e.TotalMessage)
        assert message.total == 1
def test_related_field(e, m, r):
    """Set field of the related data source to the entity field.

    Mapper could point any field of the entity to any field of any
    related model of the mapped data source.
    """
    message_mapper = Mapper(
        e.NamedMessage,
        m.MessageModel,
        {"primary_key": "id", "username": ("user", "name")},
    )
    load_messages = r.get("load_messages", message_mapper, e.NamedMessage)
    messages = load_messages()
    assert isinstance(messages, list)
    first, second = messages
    for message in (first, second):
        assert isinstance(message, e.NamedMessage)
        assert message.username == ""
def test_named_evaluated_field(e, m, r):
    """Use custom name in the data source for the evaluation result.

    Evaluated marker could be pointed to the field with a different
    name than the target attribute.
    """
    total_mapper = Mapper(
        e.TotalMessage,
        m.MessageModel,
        {"primary_key": "id", "total": Evaluated("total_number")},
    )
    load_messages = r.get(
        "load_total_messages", total_mapper, e.TotalMessage, "total_number"
    )
    messages = load_messages()
    assert isinstance(messages, list)
    first, second = messages
    for message in (first, second):
        assert isinstance(message, e.TotalMessage)
        assert message.total == 1
def test_data_source_type_validation(e):
    """Data source argument should be a Django model."""
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.User, object())
    assert str(exc_info.value) == expected
def orm_all(self):
    """Load every model instance and map each one to an entity."""
    instances = self.__all()
    entity_mapper = Mapper(self.__entity, self.__model)

    # Identity reader: the sequence decorator performs the actual
    # model-to-entity conversion of the whole collection.
    @entity_mapper.reader.sequence
    def to_entities(entities):
        return entities

    return to_entities(instances)
def test_entity_type_validation(m):
    """Entity argument should be a dataclass, pydantic, or attrs class."""
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(object(), m.UserModel)
    assert str(exc_info.value) == expected
def test_config_value_type_validation(e, m):
    """Config value should be a string."""
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.User, m.UserModel, {"test": object()})
    assert str(exc_info.value) == expected
def test_nested_entities_kind_validation(e, m):
    """Detect if data source relations breaks the contract.

    If entity have a nested entity as its field, the corresponding
    data source field should resolve to only one object.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(
            e.UserChat,
            m.ChatModel,
            {"primary_key": "id", "subscribers": Mapper({"primary_key": "id"})},
        )
    assert str(exc_info.value) == expected
def test_nested_entities_type_validation(e, m, value):
    """Detect invalid config definition.

    If entity have a nested entity as its field, the mapper cannot
    have config definition of that field which is not a Mapper.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.Message, m.MessageModel, {"primary_key": "id", "user": value})
    assert str(exc_info.value) == expected
def test_nested_entities_validation(e, m):
    """Detect if data source relations breaks the contract.

    If entity have a nested entity as its field, a corresponding
    data source field should be a relation object.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.UserGroup, m.GroupModel, {"primary_key": "id"})
    assert str(exc_info.value) == expected
def test_unknown_entity_fields(e, m):
    """Config keys should correspond to the entity fields only.

    There is no possibility to have random keys in the config not
    related to the entity.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.User, m.UserModel, {"age": "created"})
    assert str(exc_info.value) == expected
def test_unknown_data_source_fields(e, m):
    """Config values should correspond to the data source fields only.

    There is no possibility to point to the random strings not
    related to the data source.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.User, m.UserModel, {"avatar": "photo"})
    assert str(exc_info.value) == expected
def test_deep_nested_mapper(e, m, r):
    """Set mapper as a field of another field.

    Nested entities could contain nested entities as well.  Mappers
    of nested entities should contain nested mappers as well.

    This code should return a list of `Delivery` instances.  Each
    `Delivery` instance should have `Message` instance as its
    attribute.  Each `Message` instance should have `User` as its
    attribute.
    """
    delivery_mapper = Mapper(
        e.Delivery,
        m.MessageDeliveryModel,
        {
            "primary_key": "id",
            "message": Mapper(
                {"primary_key": "id", "user": Mapper({"primary_key": "id"})}
            ),
        },
    )
    load_deliveries = r.get("load_deliveries", delivery_mapper, e.Delivery)
    deliveries = load_deliveries()
    assert isinstance(deliveries, list)
    first, second = deliveries
    for delivery in (first, second):
        assert isinstance(delivery, e.Delivery)
        assert isinstance(delivery.message, e.Message)
        assert isinstance(delivery.message.user, e.User)
def test_nullable_field_validation(e, m):
    """Detect if data source field breaks the contract.

    Data source cannot have nullable field if corresponding entity
    attribute is not annotated with Optional type.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.Group, m.GroupModel, {"primary_key": "id"})
    assert str(exc_info.value) == expected
def test_data_source_field_missing(e, m):
    """Detect if data source field set is not complete.

    Raise exception if data source missed some fields required by
    entity.  And there is no configuration related to the field.
    """
    expected = "Can not find 'primary_key' field in the %s model" % (
        m.UserModel,
    )
    with pytest.raises(MapperError) as exc_info:
        Mapper(e.User, m.UserModel)
    assert str(exc_info.value) == expected
def test_result_unknown_converter(e, m, r, value):
    """Raise error in unclear situation.

    If annotation of the reader will be something unknown, we should
    raise MapperError.
    """
    expected = ""
    user_mapper = Mapper(e.User, m.UserModel, {"primary_key": "id"})
    with pytest.raises(MapperError) as exc_info:
        r.get("invalid_converter", user_mapper, value)
    assert str(exc_info.value) == expected
def test_result_object_converter(e, m, r):
    """Return a single object.

    If instead of converter annotation will be an entity class, we
    should return a single object.  Not a collection.
    """
    user_mapper = Mapper(e.User, m.UserModel, {"primary_key": "id"})
    load_user = r.get("load_user", user_mapper, e.User, e.UserId)
    found = load_user(1)
    assert isinstance(found, e.User)
    # A missing primary key propagates the model's DoesNotExist error.
    with pytest.raises(m.UserModel.DoesNotExist):
        load_user(3)
def test_apply_result_converter(e):
    """Infer collection converter from the function result annotation.

    This code should return a list of `User` instances.
    """
    user_mapper = Mapper(e.User, models.UserModel, {"primary_key": "id"})

    @user_mapper.reader
    def load_users() -> List[e.User]:
        return models.UserModel.objects.all()

    first, second = load_users()
    for user in (first, second):
        assert isinstance(user, e.User)
def test_result_list_converter(e, m, r):
    """Infer collection converter from the function result annotation.

    This code should return a list of `User` instances.
    """
    user_mapper = Mapper(e.User, m.UserModel, {"primary_key": "id"})
    load_users = r.get("load_users", user_mapper, e.User)
    users = load_users()
    assert isinstance(users, list)
    first, second = users
    for user in (first, second):
        assert isinstance(user, e.User)
def test_result_optional_converter(e, m, r):
    """Return a single object or None.

    If annotation of the reader will be an optional entity class, we
    should not raise DoesNotExist error.  Instead of this we will
    return None.
    """
    user_mapper = Mapper(e.User, m.UserModel, {"primary_key": "id"})
    load_user = r.get("load_user_or_none", user_mapper, e.User, e.UserId)
    present = load_user(1)
    assert isinstance(present, e.User)
    missing = load_user(3)
    assert missing is None
def test_related_field_validation(e, m):
    """Detect invalid config definition.

    If the mapper defines a related field, a corresponding data
    source field should be a relation object.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(
            e.NamedMessage,
            m.MessageModel,
            {"primary_key": "id", "username": ("text", "name")},
        )
    assert str(exc_info.value) == expected
def test_related_field_type_validation(e, m):
    """Detect invalid config definition.

    Related field definition in the mapper config should be a tuple
    of strings.  We can not have arbitrary objects in the field
    definition.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(
            e.NamedMessage,
            m.MessageModel,
            {"primary_key": "id", "username": ("user", object())},
        )
    assert str(exc_info.value) == expected
def test_related_field_kind_validation(e, m):
    """Detect invalid config definition.

    If mapper defines related field, the corresponding data source
    field should resolve to only one object.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(
            e.Chat,
            m.ChatModel,
            {"primary_key": "id", "is_hidden": ("subscribers", "name")},
        )
    assert str(exc_info.value) == expected
def orm_get(self, **fields):
    """
    Get a new entity in db

    @param fields: fields in entity
    @type fields: dict
    @return: model_instance
    @rtype: type(model_instance)
    """
    instance = self.__get(**fields)
    # Build the mapper before the None check so that mapper
    # validation errors surface regardless of the lookup result.
    entity_mapper = Mapper(self.__entity, self.__model)

    @entity_mapper.reader.entity
    def to_entity(entity):
        return entity

    if not instance:
        return None
    return to_entity(instance)
def test_nullable_field_optional_attribute(e, m, r):
    """Detect if data source field follows the contract.

    Data source can have nullable field if corresponding entity
    attribute annotated with Optional type.
    """
    group_mapper = Mapper(e.OptionalGroup, m.GroupModel, {"primary_key": "id"})
    load_groups = r.get("load_groups", group_mapper, e.OptionalGroup)
    groups = load_groups()
    assert isinstance(groups, list)
    first, second = groups
    for group in (first, second):
        assert isinstance(group, e.OptionalGroup)
    assert first.name is None
    assert second.name == ""
def test_related_field_length_validation(e, m):
    """Detect invalid config definition.

    Related field could not have place in the same data source as
    its entity.  Therefore, related field definition could not have
    length of one.
    """
    expected = ""
    with pytest.raises(MapperError) as exc_info:
        Mapper(
            e.NamedMessage,
            m.MessageModel,
            {"primary_key": "id", "username": ("text",)},
        )
    assert str(exc_info.value) == expected
def test_result_raw_method(e, m, r):
    """Provide a way to access underling iterable object.

    This code should return a queryset of `User` instances.
    """
    user_mapper = Mapper(e.User, m.UserModel, {"primary_key": "id"})
    load_users = r.get("load_users", user_mapper, e.User)
    raw_result = load_users.raw()
    assert isinstance(raw_result, m.iterable_class)
    iterator = iter(raw_result)
    first = next(iterator)
    second = next(iterator)
    for user in (first, second):
        assert isinstance(user, e.User)
    # Exactly two users exist in the fixture data.
    with pytest.raises(StopIteration):
        next(iterator)
def test_resolve_id_field_from_foreign_key_without_config(e, m, r):
    """Use foreign key as a field.

    Original data source model could have foreign key field defined.
    The actual entity may require only id value with out whole
    related object.  Code below should work with out config
    specifics of the `user` field.
    """
    message_mapper = Mapper(e.FlatMessage, m.MessageModel, {"primary_key": "id"})
    load_messages = r.get("load_messages", message_mapper, e.FlatMessage)
    messages = load_messages()
    assert isinstance(messages, list)
    first, second = messages
    for message in (first, second):
        assert isinstance(message, e.FlatMessage)
    assert first.user_id == 1
    assert second.user_id == 2
def ERA(I, iters, **args):
    """
    Find the phases of 'I' given O using the Error Reduction Algorithm.

    Parameters
    ----------
    I : numpy.ndarray, (N, M, K)
        Merged diffraction patterns to be phased.

        N : the number of pixels along slowest scan axis of the detector
        M : the number of pixels along slow scan axis of the detector
        K : the number of pixels along fast scan axis of the detector

    iters : int
        The number of ERA iterations to perform.

    O : numpy.ndarray, (N, M, K)
        The real-space scattering density of the object such that:
            I = |F[O]|^2
        where F[.] is the 3D Fourier transform of '.'.

    support : (numpy.ndarray, None or int), (N, M, K)
        Real-space region where the object function is known to be zero.
        If support is an integer then the N most intense pixels will be
        kept at each iteration.

    mask : numpy.ndarray, (N, M, K), optional, default (1)
        The valid detector pixels.  Mask[i, j, k] = 1 (or True) when the
        detector pixel i, j, k is valid, Mask[i, j, k] = 0 (or False)
        otherwise.

    hardware : ('cpu', 'gpu'), optional, default ('cpu')
        Choose to run the reconstruction on a single cpu core ('cpu') or
        a single gpu ('gpu').  The numerical results should be identical.

    alpha : float, optional, default (1.0e-10)
        A floating point number to regularise array division (prevents
        1/0 errors).

    dtype : (None, 'single' or 'double'), optional, default ('single')
        Determines the numerical precision of the calculation.  If
        dtype==None, then it is determined from the datatype of I.

    Mapper : class, optional, default None
        A mapping class that provides the methods supplied by:
        phasing_3d.src.mappers.Mapper

    Returns
    -------
    O : numpy.ndarray, (U, V, K)
        The real-space object function after 'iters' iterations of the
        ERA algorithm.

    info : dict
        contains diagnostics:

        'I'    : the diffraction pattern corresponding to object above
        'eMod' : the modulus error for each iteration:
                 eMod_i = sqrt( sum(| O_i - Pmod(O_i) |^2) / I )
        'eCon' : the convergence error for each iteration:
                 eCon_i = sqrt( sum(| O_i - O_i-1 |^2) / sum(| O_i |^2) )

    Notes
    -----
    The ERA is the simplest iterative projection algorithm.  It proceeds
    by progressive projections of the exit surface waves onto the set of
    function that satisfy the:

    modulus constraint : after propagation to the detector the exit
                         surface waves must have the same modulus (square
                         root of the intensity) as the detected
                         diffraction patterns (the I's).

    support constraint : the exit surface waves (W) must be separable
                         into some object and probe functions so that
                         W_n = O_n x P.

    The 'projection' operation onto one of these constraints makes the
    smallest change to the set of exit surface waves (in the Euclidean
    sense) that is required to satisfy said constraint.

    Examples
    --------
    """
    # set the real and complex data precision
    # ---------------------------------------
    # NOTE: dtype=None (documented above) must fall back to the dtype of
    # I.  The previous `if 'dtype' not in args.keys()` chain left `dtype`
    # unbound when args['dtype'] was None, raising NameError below.
    if args.get('dtype') is None:
        dtype = I.dtype
        c_dtype = (I[0, 0, 0] + 1J * I[0, 0, 0]).dtype
    elif args['dtype'] == 'single':
        dtype = np.float32
        c_dtype = np.complex64
    elif args['dtype'] == 'double':
        dtype = np.float64
        c_dtype = np.complex128
    else:
        # Fail loudly on an unrecognised precision request instead of
        # hitting a confusing NameError further down.
        raise ValueError("dtype must be None, 'single' or 'double'")

    args['dtype'] = dtype
    args['c_dtype'] = c_dtype

    # Choose the Mapper implementation: explicit argument wins, then the
    # gpu backend when requested, otherwise the default cpu backend.
    if isValid('Mapper', args):
        Mapper = args['Mapper']
    elif isValid('hardware', args) and args['hardware'] == 'gpu':
        from mappers_gpu import Mapper
    else:
        print('using default cpu mapper')
        from mappers import Mapper

    eMods = []
    eCons = []

    # Set the Mapper for the single mode (default)
    # ---------------------------------------
    # this guy is responsible for doing:
    #   I     = mapper.Imap(modes)   # mapping the modes to the intensity
    #   modes = mapper.Pmod(modes)   # applying the data projection to the modes
    #   modes = mapper.Psup(modes)   # applying the support projection to the modes
    #   O     = mapper.object(modes) # the main object of interest
    #   dict  = mapper.finish(modes) # add any additional output to the info dict
    # ---------------------------------------
    mapper = Mapper(I, **args)
    modes = mapper.modes

    if iters > 0 and rank == 0:
        print('\n\nalgorithm progress iteration convergence modulus error')

    for i in range(iters):
        modes0 = modes.copy()

        # modulus projection
        # ------------------
        modes = mapper.Pmod(modes)

        modes1 = modes.copy()

        # support projection
        # ------------------
        modes = mapper.Psup(modes)

        # metrics
        eMod = mapper.Emod(modes)

        modes0 -= modes
        eCon = mapper.l2norm(modes0, modes)

        if rank == 0:
            update_progress(i / max(1.0, float(iters - 1)), 'ERA', i, eCon, eMod)

        eMods.append(eMod)
        eCons.append(eCon)

    info = {}
    info['eMod'] = eMods
    info['eCon'] = eCons
    info.update(mapper.finish(modes))

    O = mapper.object(modes)
    return O, info