def update_attributes(old_entity, new_entity):
    """
    Apply the changes held in `new_entity` to an existing Python ICAT entity record.

    :param old_entity: An existing entity record from Python ICAT
    :type old_entity: :class:`icat.entities.ENTITY` (implementation of
        :class:`icat.entity.Entity`)
    :param new_entity: Dictionary containing the new data to be modified
    :type new_entity: :class:`dict`
    :return: The modified entity record
    :raises BadRequestError: If the attribute cannot be found, or if it cannot be
        edited - typically if Python ICAT doesn't allow an attribute to be edited
        (e.g. modId & modTime)
    """
    log.debug("Updating entity attributes: %s", list(new_entity.keys()))

    for key in new_entity:
        try:
            # Fetch the current value so datetime attributes can be detected;
            # incoming datetimes arrive as strings and must be converted first
            current_value = getattr(old_entity, key)
            if isinstance(current_value, datetime):
                new_entity[key] = DateHandler.str_to_datetime_object(new_entity[key])
        except AttributeError:
            raise BadRequestError(
                f"Bad request made, cannot find attribute '{key}' within the"
                f" {old_entity.BeanName} entity",
            )

        try:
            setattr(old_entity, key, new_entity[key])
        except AttributeError:
            # Some attributes exist but are read-only in Python ICAT
            raise BadRequestError(
                f"Bad request made, cannot modify attribute '{key}' within the"
                f" {old_entity.BeanName} entity",
            )

    return old_entity
def test_get_valid_session_details(
    self,
    flask_test_app_icat,
    valid_icat_credentials_header,
):
    """Session details from /sessions contain a sane expiry, username and ID."""
    response = flask_test_app_icat.get(
        "/sessions",
        headers=valid_icat_credentials_header,
    )

    # Expiry should be roughly 2 hours away; allow leeway for slow test execution
    expiry = DateHandler.str_to_datetime_object(response.json["expireDateTime"])
    minutes_until_expiry = abs(expiry - datetime.now(tzlocal())).seconds / 60
    assert 118 <= minutes_until_expiry < 120

    # Username should be "<mechanism>/<username>" built from the test credentials
    mechanism = config.get_config_value(APIConfigOptions.TEST_MECHANISM)
    username = config.get_config_value(APIConfigOptions.TEST_USER_CREDENTIALS)[
        "username"
    ]
    assert response.json["username"] == f"{mechanism}/{username}"

    # Session ID should match the token sent in the Authorization header
    auth_token = valid_icat_credentials_header["Authorization"].split()[1]
    assert response.json["id"] == auth_token
def prepare_icat_data_for_assertion(data, remove_id=False, remove_visit_id=False):
    """
    Strip meta attributes from ICAT data so test assertions are stable.

    Meta attributes contain data about data creation/modification, and should be
    removed to ensure correct assertion values.

    :param data: ICAT data containing meta attributes such as modTime
    :type data: :class:`list` or :class:`icat.entity.EntityList`
    :param remove_id: Whether to also drop the "id" attribute, defaults to False
    :param remove_visit_id: Whether to also drop the "visitId" attribute, defaults
        to False
    :return: List of dictionaries ready to be asserted against
    """
    prepared = []
    for record in data:
        # ICAT entity objects need converting to plain dictionaries first
        if isinstance(record, Entity):
            record = record.as_dict()

        # Entity.MetaAttr is immutable, so it can safely be reused per record
        for meta_attr in Entity.MetaAttr:
            record.pop(meta_attr)

        # Serialise datetimes the same way the API does, so values compare equal
        for attr_name in record.keys():
            if isinstance(record[attr_name], datetime):
                record[attr_name] = DateHandler.datetime_object_to_str(
                    record[attr_name],
                )

        if remove_id:
            record.pop("id")
        if remove_visit_id:
            record.pop("visitId")

        prepared.append(record)

    return prepared
def entity_to_dict(self, entity, includes):
    """
    This expands on Python ICAT's implementation of `icat.entity.Entity.as_dict()`
    to use set operators to create a version of the entity as a dictionary

    Most of this function is dedicated to recursing over included fields from a
    query, since this is functionality isn't part of Python ICAT's `as_dict()`.
    This function can be used when there are no include filters in the
    query/request however.

    :param entity: Python ICAT entity from an ICAT query
    :type entity: :class:`icat.entities.ENTITY` (implementation of
        :class:`icat.entity.Entity`) or :class:`icat.entity.EntityList`
    :param includes: List of fields that have been included in the ICAT query. It
        is assumed each element has been checked for multiple fields separated by
        dots, split them accordingly and flattened the resulting list. Note:
        ICATQuery.flatten_query_included_fields performs this functionality.
    :type includes: :class:`list`
    :return: ICAT Data (of type dictionary) ready to be serialised to JSON
    """
    d = {}

    # Verifying that `includes` only has fields which are related to the entity
    include_set = (entity.InstRel | entity.InstMRel) & set(includes)
    for key in entity.InstAttr | entity.MetaAttr | include_set:
        if key in includes:
            # Key is a related entity that was asked for via an include filter
            target = getattr(entity, key)
            # Copy and remove don't return values so must be done separately
            includes_copy = includes.copy()
            try:
                includes_copy.remove(key)
            except ValueError:
                log.warning(
                    "Key couldn't be found to remove from include list, this could"
                    " cause an issue further on in the request",
                )
            # One-one relationship: recurse into the single related entity
            if isinstance(target, Entity):
                d[key] = self.entity_to_dict(target, includes_copy)

            # Related fields with one-many relationships are stored as EntityLists
            elif isinstance(target, EntityList):
                d[key] = []
                for e in target:
                    d[key].append(self.entity_to_dict(e, includes_copy))
        # Add actual piece of data to the dictionary
        else:
            entity_data = getattr(entity, key)
            # Convert datetime objects to strings ready to be outputted as JSON
            if isinstance(entity_data, datetime):
                # Remove timezone data which isn't utilised in ICAT
                entity_data = DateHandler.datetime_object_to_str(entity_data)
            d[key] = entity_data
    return d
def test_valid_str(self):
    """Parsing a well-formed datetime string yields the matching datetime."""
    expected = datetime(2008, 10, 15, 12, 5, 9)
    assert DateHandler.str_to_datetime_object("2008-10-15 12:05:09") == expected
def test_valid_boundary_str(self):
    """A leap-day timestamp string (29 Feb 2020) is parsed correctly."""
    expected = datetime(2020, 2, 29, 20, 20, 20)
    assert DateHandler.str_to_datetime_object("2020-02-29 20:20:20") == expected
def test_valid_datetime(self):
    """A datetime object serialises in "YYYY-MM-DD HH:MM:SS" form."""
    sample = datetime(2008, 10, 15, 12, 5, 9)
    assert DateHandler.datetime_object_to_str(sample) == "2008-10-15 12:05:09"
def test_valid_boundary_datetime(self):
    """A leap-day datetime serialises correctly.

    Invalid leap years can't be exercised here because invalid datetime
    objects cannot be constructed in the first place.
    """
    sample = datetime(2020, 2, 29, 23, 59, 59)
    assert DateHandler.datetime_object_to_str(sample) == "2020-02-29 23:59:59"
def get_session_details_helper(client):
    """
    Retrieve details regarding the current session within `client`

    :param client: ICAT client containing an authenticated user
    :type client: :class:`icat.client.Client`
    :return: Details of the user's session, ready to be converted into a JSON
        response body
    """
    # Expiry is derived from "minutes remaining"; drop microseconds so the
    # serialised value is clean
    minutes_remaining = client.getRemainingMinutes()
    expiry_datetime = datetime.now(tzlocal()) + timedelta(minutes=minutes_remaining)
    expiry_datetime = expiry_datetime.replace(microsecond=0)

    username = client.getUserName()

    return {
        "id": client.sessionId,
        "expireDateTime": DateHandler.datetime_object_to_str(expiry_datetime),
        "username": username,
    }
def map_distinct_attributes_to_results(distinct_attributes, query_result):
    """
    Map the attribute names from a distinct filter onto the results given by the
    result of a query.

    When selecting multiple (but not all) attributes in a database query, the
    results are returned in a list and not mapped to an entity object. This means
    the 'normal' functions used to process data ready for output
    (`entity_to_dict()` for the ICAT backend) cannot be used, as the structure of
    the query result is different.

    :param distinct_attributes: List of distinct attributes from the distinct
        filter of the incoming request
    :type distinct_attributes: :class:`list`
    :param query_result: Results fetched from a database query (backend
        independent due to the data structure of this parameter)
    :type query_result: :class:`tuple` or :class:`list` when a single attribute is
        given from ICAT backend, or :class:`sqlalchemy.engine.row.Row` when used
        on the DB backend
    :return: Dictionary of attribute names paired with the results, ready to be
        returned to the user
    """
    result_dict = {}
    for attr_name, value in zip(distinct_attributes, query_result):
        if isinstance(value, datetime):
            # Workaround for when this function is used on DB backend, where
            # usually `_make_serializable()` would fix tzinfo
            if value.tzinfo is None:
                value = value.replace(tzinfo=tzlocal())
            value = DateHandler.datetime_object_to_str(value)

        # Splitting attribute names in case it's from a related entity
        name_parts = attr_name.split(".")
        if len(name_parts) == 1:
            # Attribute name is from the 'origin' entity (i.e. not related)
            result_dict[attr_name] = value
        else:
            # Related entity attribute, dictionary needs to be nested
            result_dict.update(map_nested_attrs({}, name_parts, value))

    return result_dict
class TestDBGetWithFilters:
    """
    Tests for GET requests with filters against the DB backend's /investigations
    endpoint. Fix: removed a leftover debug `print()` call from
    `test_valid_get_with_filters_related_distinct`.
    """

    def test_valid_get_with_filters(
        self,
        flask_test_app_db,
        valid_db_credentials_header,
        single_investigation_test_data_db,
    ):
        test_response = flask_test_app_db.get(
            '/investigations?where={"title": {"like": "Title for DataGateway API'
            ' Testing (DB)"}}',
            headers=valid_db_credentials_header,
        )

        assert test_response.json == [single_investigation_test_data_db.to_dict()]

    def test_valid_no_results_get_with_filters(
        self,
        flask_test_app_db,
        valid_db_credentials_header,
    ):
        test_response = flask_test_app_db.get(
            '/investigations?where={"title": {"eq": "This filter should cause a 404 for'
            'testing purposes..."}}',
            headers=valid_db_credentials_header,
        )

        assert test_response.json == []

    @pytest.mark.usefixtures("multiple_investigation_test_data_db")
    def test_valid_get_with_filters_multiple_distinct(
        self,
        flask_test_app_db,
        valid_db_credentials_header,
    ):
        test_response = flask_test_app_db.get(
            '/investigations?where={"title": {"like": "Title for DataGateway API'
            ' Testing (DB)"}}&distinct="title"',
            headers=valid_db_credentials_header,
        )

        expected = [
            {"title": f"Title for DataGateway API Testing (DB) {i}"} for i in range(5)
        ]

        assert test_response.json == expected

    @pytest.mark.parametrize(
        "distinct_param, expected_response",
        [
            pytest.param(
                '"title"',
                [{"title": "Title for DataGateway API Testing (DB) 0"}],
                id="Single unrelated distinct field",
            ),
            pytest.param(
                '"investigationInstruments.createTime"',
                [
                    {
                        "investigationInstruments": {
                            "createTime": DateHandler.datetime_object_to_str(
                                Constants.TEST_MOD_CREATE_DATETIME,
                            ),
                        },
                    },
                ],
                id="Single related distinct field",
            ),
            pytest.param(
                '["createTime", "investigationInstruments.createTime"]',
                [
                    {
                        "createTime": DateHandler.datetime_object_to_str(
                            Constants.TEST_MOD_CREATE_DATETIME,
                        ),
                        "investigationInstruments": {
                            "createTime": DateHandler.datetime_object_to_str(
                                Constants.TEST_MOD_CREATE_DATETIME,
                            ),
                        },
                    },
                ],
                id="Single related distinct field with unrelated field",
            ),
            pytest.param(
                '["investigationInstruments.createTime", "facility.id"]',
                [
                    {
                        "facility": {"id": 1},
                        "investigationInstruments": {
                            "createTime": DateHandler.datetime_object_to_str(
                                Constants.TEST_MOD_CREATE_DATETIME,
                            ),
                        },
                    },
                ],
                id="Multiple related distinct fields",
            ),
            pytest.param(
                '["createTime", "investigationInstruments.createTime", "facility.id"]',
                [
                    {
                        "createTime": DateHandler.datetime_object_to_str(
                            Constants.TEST_MOD_CREATE_DATETIME,
                        ),
                        "facility": {"id": 1},
                        "investigationInstruments": {
                            "createTime": DateHandler.datetime_object_to_str(
                                Constants.TEST_MOD_CREATE_DATETIME,
                            ),
                        },
                    },
                ],
                id="Multiple related distinct fields with unrelated field",
            ),
        ],
    )
    @pytest.mark.usefixtures("isis_specific_endpoint_data_db")
    def test_valid_get_with_filters_related_distinct(
        self,
        flask_test_app_db,
        valid_db_credentials_header,
        distinct_param,
        expected_response,
    ):
        test_response = flask_test_app_db.get(
            '/investigations?where={"title": {"like": "Title for DataGateway API'
            ' Testing (DB)"}}'
            f"&distinct={distinct_param}",
            headers=valid_db_credentials_header,
        )

        assert test_response.json == expected_response

    def test_limit_skip_merge_get_with_filters(
        self,
        flask_test_app_db,
        valid_db_credentials_header,
        multiple_investigation_test_data_db,
    ):
        skip_value = 1
        limit_value = 2

        test_response = flask_test_app_db.get(
            '/investigations?where={"title": {"like": "Title for DataGateway API'
            ' Testing (DB)"}}'
            f'&skip={skip_value}&limit={limit_value}&order="id ASC"',
            headers=valid_db_credentials_header,
        )

        # Copy required to ensure data is deleted at the end of the test
        investigation_test_data_copy = multiple_investigation_test_data_db.copy()
        filtered_investigation_data = []
        filter_count = 0
        while filter_count < limit_value:
            filtered_investigation_data.append(
                investigation_test_data_copy.pop(skip_value).to_dict(),
            )
            filter_count += 1

        assert test_response.json == filtered_investigation_data
def test_valid_date(self):
    """A correctly formatted date string is recognised as a date."""
    assert DateHandler.is_str_a_date("2008-10-15") is True
def test_valid_datetime_no_time(self):
    """A date-only datetime serialises with a midnight time component."""
    midnight_date = datetime(2008, 10, 15)
    assert DateHandler.datetime_object_to_str(midnight_date) == "2008-10-15 00:00:00"
def test_invalid_str_format_order(self):
    """A string with the time before the date is rejected."""
    time_first_str = "12:05:09 2019-10-05"
    with pytest.raises(BadRequestError):
        DateHandler.str_to_datetime_object(time_first_str)
def test_invalid_str_format_symbols(self):
    """A slash-separated date string does not match the expected format."""
    slash_separated_str = "2019/10/05 12:05:09"
    with pytest.raises(BadRequestError):
        DateHandler.str_to_datetime_object(slash_separated_str)
def test_invalid_boundary_str(self):
    """29th February 2019 must be rejected — 2019 was not a leap year."""
    non_leap_year_str = "2019-02-29 12:05:09"
    with pytest.raises(BadRequestError):
        DateHandler.str_to_datetime_object(non_leap_year_str)
def test_invalid_date(self):
    """A nonsensical day/month combination is not recognised as a date."""
    assert DateHandler.is_str_a_date("25/25/2020") is False
def test_invalid_boundary_date(self):
    """29/2/2019 is invalid — there was no leap year in 2019."""
    assert DateHandler.is_str_a_date("29/2/2019") is False
def test_valid_boundary_date(self):
    """29/2/2020 is valid — 2020 was a leap year."""
    assert DateHandler.is_str_a_date("29/2/2020") is True
def create_entities(client, entity_type, data):
    """
    Add one or more results for the given entity using the JSON provided in `data`

    `created_icat_data` is data of `icat.entity.Entity` type that is collated to
    be pushed to ICAT at the end of the function - this avoids confusion over
    which data has/hasn't been created if the request returns an error. When
    pushing the data to ICAT, there is still risk an exception might be caught, so
    any entities already pushed to ICAT will be deleted. Python ICAT doesn't
    support a database rollback (or the concept of transactions) so this is a good
    alternative.

    :param client: ICAT client containing an authenticated user
    :type client: :class:`icat.client.Client`
    :param entity_type: The type of entity requested to manipulate data with
    :type entity_type: :class:`str`
    :param data: The data that needs to be created in ICAT
    :type data_to_update: :class:`list` or :class:`dict`
    :return: The created record(s) of the given entity
    :raises BadRequestError: On an unknown related object, an invalid attribute
        value, or an ICAT object-exists/parameter/validation error
    :raises PythonICATError: On an ICAT internal error while creating an entity
    """
    log.info("Creating ICAT data for %s", entity_type)

    created_data = []
    created_icat_data = []

    # Allow a single record to be given without wrapping it in a list
    if not isinstance(data, list):
        data = [data]

    # Phase 1: build all entity objects locally, before anything is pushed to ICAT
    for result in data:
        new_entity = client.new(
            get_icat_entity_name_as_camel_case(client, entity_type),
        )

        for attribute_name, value in result.items():
            log.debug("Preparing data for %s", attribute_name)
            try:
                entity_info = new_entity.getAttrInfo(client, attribute_name)
                if entity_info.relType.lower() == "attribute":
                    # Short circuiting ensures is_str_date() will only be executed
                    # if value is a string
                    if isinstance(value, str) and DateHandler.is_str_a_date(value):
                        value = DateHandler.str_to_datetime_object(value)
                    setattr(new_entity, attribute_name, value)
                else:
                    # This means the attribute has a relationship with another
                    # object
                    try:
                        related_object = client.get(entity_info.type, value)
                    except ICATNoObjectError as e:
                        raise BadRequestError(e)
                    if entity_info.relType.lower() == "many":
                        # One-many relationships expect a list of related objects
                        related_object = [related_object]
                    setattr(new_entity, attribute_name, related_object)
            except ValueError as e:
                raise BadRequestError(e)
        created_icat_data.append(new_entity)

    # Phase 2: push each entity to ICAT; on failure, roll back what was created
    for entity in created_icat_data:
        try:
            entity.create()
        except ICATInternalError as e:
            for entity_json in created_data:
                # Delete any data that has been pushed to ICAT before the exception
                delete_entity_by_id(client, entity_type, entity_json["id"])
            raise PythonICATError(e)
        except (ICATObjectExistsError, ICATParameterError, ICATValidationError) as e:
            # Same rollback as above, but the fault lies with the request data
            for entity_json in created_data:
                delete_entity_by_id(client, entity_type, entity_json["id"])
            raise BadRequestError(e)

        created_data.append(get_entity_by_id(client, entity_type, entity.id, True))

    return created_data