def setUp(self):
    """
    Builds the shared test fixtures: sample UserStates plus the unique users,
    regions, projects, locations and search locations referenced by them, and a
    test client authenticated as the first sample user.
    """
    delete_sample_user_states()
    self.user_state_schema = create_user_state_config(
        default_class_config())
    self.user_states = create_sample_user_states(
        UserState,
        get_region_model(),
        get_project_model(),
        get_location_schema()['model_class'],
        get_search_location_schema()['model_class'])
    # Gather all unique sample users
    self.users = list(
        set(R.map(lambda user_state: user_state.user, self.user_states)))
    self.client = client_for_testing(schema(), self.users[0])
    # Gather all unique sample regions
    self.regions = R.compose(
        # Fourth resolve persisted Regions
        R.map(lambda id: get_region_model().objects.get(id=id)),
        # Third make ids unique
        lambda ids: list(set(ids)),
        # Second map each to the region id
        R.map(R.item_str_path('region.id')),
        # First flat map the user regions of all user_states
        R.chain(lambda user_state: R.item_str_path(
            'data.userRegions', user_state.__dict__)))(self.user_states)
    # Gather all unique sample projects
    self.projects = R.compose(
        # Fourth resolve persisted Projects
        R.map(lambda id: get_project_model().objects.get(id=id)),
        # Third make ids unique
        lambda ids: list(set(ids)),
        # Second map each to the project id
        R.map(R.item_str_path('project.id')),
        # First flat map the user projects of all user_states
        R.chain(lambda user_state: R.item_str_path(
            'data.userProjects', user_state.__dict__)))(self.user_states)
    self.locations = create_local_sample_locations(
        get_location_schema()['model_class'])

    def extract_search_location_ids(user_regions):
        # Flat-map every userRegion's userSearch.userSearchLocations to its searchLocation id
        return R.map(
            R.item_str_path('searchLocation.id'),
            R.chain(R.item_str_path('userSearch.userSearchLocations'), user_regions))

    # Gather all unique searches locations from userRegions.
    # user searches could also be in userProjects, but we'll ignore that
    self.search_locations = R.compose(
        # Fourth resolve persisted UserSearches
        lambda ids: R.map(
            lambda id: get_search_location_schema()['model_class'].objects.get(id=id),
            ids),
        # Third make ids unique
        lambda ids: list(set(ids)),
        # Chain to a flat list of user search location ids
        lambda user_regions: extract_search_location_ids(user_regions),
        # First flat map the user regions of all user_states
        R.chain(lambda user_state: R.item_str_path(
            'data.userRegions', user_state.__dict__)))(self.user_states)
    # Can be set by inheritors
    self.additional_user_scope_data = {}
def dump_graphql_keys(dct):
    """
    Convert a dict to graphql input parameter keys, one key per line, e.g.::

        key1
        key2
        key4 {
            subkey1
            ...
        }

    Keys are camelized if they are slugs and complex types are expanded.
    Any field whose read access is IGNORE (or DENY) is omitted.
    :param dct: keyed by field
    :return: The newline-joined graphql key string
    """
    from rescape_graphene.graphql_helpers.schema_helpers import IGNORE, DENY

    def _is_readable(key_value):
        # Keep the field unless its 'read' property is IGNORE or DENY
        read_access = R.prop_or(None, 'read', key_value[1])
        return not R.contains(read_access, [IGNORE, DENY])

    readable_fields = R.filter_dict(_is_readable, dct)
    dumped_by_key = R.map_with_obj(dump_graphene_type, readable_fields)
    return R.join('\n', R.values(dumped_by_key))
def test_create(self):
    """Creates a sample project via the mutation and verifies one revision version exists."""
    # Square polygon used as the sample project geometry
    square_geojson = {
        'type': 'FeatureCollection',
        'features': [{
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [
                    [[49.5294835476, 2.51357303225], [51.4750237087, 2.51357303225],
                     [51.4750237087, 6.15665815596], [49.5294835476, 6.15665815596],
                     [49.5294835476, 2.51357303225]]]
            }
        }]
    }
    # Reference sample locations and the first sample user by id only
    location_ids = R.map(R.compose(R.pick(['id']), lambda l: l.__dict__), self.locations)
    owner = R.pick(['id'], R.head(self.users).__dict__)
    result, new_result = quiz_model_mutation_create(
        self.client, graphql_update_or_create_project, 'createProject.project',
        dict(
            name='Carre',
            key='carre',
            geojson=square_geojson,
            data=dict(),
            locations=location_ids,
            user=owner,
        ),
        dict(key=r'carre.+'))
    # A single revision version should exist for the freshly created project
    created_id = R.item_str_path('data.createProject.project.id', result)
    versions = Version.objects.get_for_object(
        get_project_model().objects.get(id=created_id))
    assert len(versions) == 1
def test_update(self):
    """Creates then updates a sample project and verifies two revision versions exist."""
    # Polygon used at create time
    create_geojson = {
        'type': 'FeatureCollection',
        'features': [{
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [
                    [[49.4426671413, 5.67405195478], [50.1280516628, 5.67405195478],
                     [50.1280516628, 6.24275109216], [49.4426671413, 6.24275109216],
                     [49.4426671413, 5.67405195478]]]
            }
        }]
    }
    # Replacement polygon sent with the update
    update_geojson = {
        'features': [{
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [
                    [[49.5294835476, 2.51357303225], [51.4750237087, 2.51357303225],
                     [51.4750237087, 6.15665815596], [49.5294835476, 6.15665815596],
                     [49.5294835476, 2.51357303225]]]
            }
        }]
    }
    # Extract only the id from a model instance
    pick_id = R.compose(R.pick(['id']), lambda l: l.__dict__)
    result, update_result = quiz_model_mutation_update(
        self.client,
        graphql_update_or_create_project,
        'createProject.project',
        'updateProject.project',
        dict(
            name='Carre',
            key='carre',
            geojson=create_geojson,
            data=dict(),
            locations=R.map(pick_id, self.locations),
            user=R.pick(['id'], R.head(self.users).__dict__),
        ),
        # Update the coords and limit to one location
        dict(
            geojson=update_geojson,
            locations=R.map(pick_id, [R.head(self.locations)])
        )
    )
    # The create plus the update should leave exactly two versions
    updated_id = R.item_str_path('data.updateProject.project.id', update_result)
    versions = Version.objects.get_for_object(
        get_project_model().objects.get(id=updated_id))
    assert len(versions) == 2
def convert_foreign_key_to_id(scope_obj):
    # Find ForeignKey attributes and map the class field name to the foreign key id field
    # E.g. region to region_id, user to user_id, etc
    # NOTE(review): `model` is a free variable captured from an enclosing scope not
    # visible here — confirm it is the Django model class of scope_obj
    converters = R.compose(
        R.from_pairs,
        R.map(lambda field: [field.name, field.attname]),
        R.filter(lambda field: R.isinstance(ForeignKey, field)))(
        model._meta.fields)
    # Convert scope_obj[related_field] = {id: x} to scope_obj[related_field_id] = x
    return R.from_pairs(
        R.map_with_obj_to_values(
            lambda key, value: [converters[key], R.prop('id', value)]
            if R.has(key, converters)
            else [key, value],
            scope_obj))
def find_scope_instances(user_state_scope, new_data):
    """
    Retrieve the scope instances to verify the Ids.
    Scope instances must have ids unless they are allowed to be created/updated
    during the userState mutation (such as searchLocations)
    :param new_data: The data to search
    :param user_state_scope: Dict with 'pick' in the shape of the instances we are looking for
    in new_data, e.g. dict(userRegions={region: True}) to search new_data.userRegions[] for all
    occurrences of {region:...}, and 'key' which indicates the actual key of the instance
    (e.g. 'region' for regions)
    :return: dict(
        instances=Instances actually in the database,
        user_scope_path=path from userRegions/userProjects to the scope instances,
        plus the scope_objs/scope_ids found in new_data
    )
    """
    def until(key, value):
        # Stop flattening once we reach the scope key (e.g. 'region')
        return key != R.prop('key', user_state_scope)

    return R.compose(
        lambda scope_dict: dict(
            # See which instances with ids are actually in the database
            # If any are missing we have an invalid update or need to create those instances if permitted
            instances=list(
                find_scope_instances_by_id(R.prop('model', user_state_scope),
                                           scope_dict['scope_ids'])),
            # The path from userRegions or userProjects to the scope instances, used to replace
            # a null update value with the existing values
            user_scope_path=list(R.keys(R.flatten_dct(user_state_scope, '.')))[0],
            **scope_dict),
        lambda scope_objs: dict(
            # Unique by id or accept if there is no id, this loses data, but it's just for validation
            scope_objs=R.unique_by(
                lambda obj: R.prop_or(str(now()), 'id', obj['value']),
                scope_objs),
            scope_ids=R.unique_by(
                R.identity,
                compact(
                    R.map(
                        lambda scope_obj: R.prop_or(None, 'id', scope_obj['value']),
                        scope_objs)))),
        # Use the pick key property to find the scope instances in the data
        # If we don't match anything we can get null or an empty item. Filter/compact these out
        R.filter(lambda obj: obj['value'] and (not isinstance(
            obj['value'], list) or R.length(obj['value']) != 0)),
        R.map(lambda pair: dict(key=pair[0], value=pair[1])),
        lambda flattened_data: R.to_pairs(flattened_data),
        lambda data: R.flatten_dct_until(
            R.pick_deep_all_array_items(R.prop('pick', user_state_scope), data),
            until,
            '.'))(new_data)
def resolve_scope_instance(scope_key, user_scope_instance):
    """
    Resolve a persisted scope instance (e.g. a region or project) by its key.
    NOTE(review): relies on `scope_instances_by_key`, a free variable captured
    from the enclosing scope mapping keys to persisted instances.
    :param scope_key: e.g. 'region' or 'project'
    :param user_scope_instance: The user scope dict containing scope_key
    :return: {scope_key: dict} referencing the instance by id when it exists,
    otherwise passing the full instance data through
    """
    # Take the instance's key, look it up among the persisted instances,
    # and read the id when a match is found
    instance_id = R.compose(
        # third get the id if it exists
        R.prop_or(None, 'id'),
        # second resolve the scope instance if it exists
        lambda k: R.prop_or(None, k, scope_instances_by_key),
        # first get the key
        R.item_str_path(f'{scope_key}.key'))(user_scope_instance)
    if instance_id:
        # Reference the persisted Scope instance by id
        resolved = dict(id=instance_id)
    else:
        # Otherwise pass everything so the server can create the instance
        # (Currently only supported for projects)
        resolved = dict(user_scope_instance[scope_key])
    return {scope_key: R.compact_dict(resolved)}
def resolver_for_feature_collection(resource, context, **kwargs): """ Like resolver but takes care of converting the geos value stored in the field to a dict that has the values we want to resolve, namely type and features. :param {string} resource: The instance whose json field data is being resolved :param {ResolveInfo} context: Graphene context which contains the fields queried in field_asts :return: {DataTuple} Standard resolver return value """ # Take the camelized keys. We don't store data fields slugified. We leave them camelized selections = R.map(lambda sel: sel.name.value, context.field_asts[0].selection_set.selections) # Recover the json by parsing the string provided by GeometryCollection and mapping the geometries property to features json = R.compose( # Map the value GeometryCollection to FeatureCollection for the type property R.map_with_obj(lambda k, v: R.if_else( R.equals('type'), R.always('FeatureCollection'), R.always(v) )(k)), # Map geometries to features: [{type: Feature, geometry: geometry}] lambda dct: R.merge( # Remove geometries R.omit(['geometries'], dct), # Add features containing the geometries dict(features=R.map( lambda geometry: dict(type='Feature', geometry=geometry), R.prop_or([], 'geometries', dct)) ) ), )(ast.literal_eval(R.prop(context.field_name, resource).json)) # Identify the keys that are actually in resource[json_field_name] all_selections = R.filter( lambda key: key in json, selections ) # Pick out the values that we want result = R.pick(all_selections, json) # Return in the standard Graphene DataTuple return namedtuple('DataTuple', R.keys(result))(*R.values(result))
def mutate(self, info, search_location_data=None):
    """
    Upserts a SearchLocation, or deletes it when marked for delete.
    :param info: Graphene resolve info
    :param search_location_data: The search location props to upsert, optionally
    including a 'jurisdictions' list of SearchJurisdiction dicts to upsert and attach
    :return: UpsertSearchLocation payload containing the mutated search_location
    """
    # Handle deletion first: if the data is marked for delete, return that response
    deleted_search_location_response = delete_if_marked_for_delete(
        SearchLocation, UpsertSearchLocation, 'search_location', search_location_data)
    if deleted_search_location_response:
        return deleted_search_location_response

    modified_search_location_data = R.compose(
        # Make sure that all props are unique that must be, either by modifying values or erring.
        lambda data: enforce_unique_props(search_location_fields, data),
        # Remove the many to many values. They are saved separately
        lambda data: R.omit(['jurisdictions'], data))(search_location_data)

    update_or_create_values = input_type_parameters_for_update_or_create(
        search_location_fields, modified_search_location_data)
    search_location, created = update_or_create_with_revision(
        SearchLocation, update_or_create_values)

    # SearchJurisdictions can be created during the creation of search_locations
    if R.prop_or(False, 'jurisdictions', search_location_data):
        existing_search_intersections_by_id = R.index_by(
            R.prop('id'), search_location.jurisdictions.all())
        # BUG FIX: iterate the 'jurisdictions' key that the guard above checked;
        # the original read the non-existent 'intersections' key
        for search_jurisdiction_unsaved in R.prop('jurisdictions', search_location_data):
            # Existing instances have an id; merge their stored values under the new ones
            search_jurisdiction_id = R.prop_or(None, 'id', search_jurisdiction_unsaved)
            search_jurisdiction, _ = update_or_create_with_revision(
                SearchJurisdiction,
                R.merge(
                    R.prop(search_jurisdiction_id, existing_search_intersections_by_id)
                    if search_jurisdiction_id else {},
                    search_jurisdiction_unsaved))
            # Once saved, add it to the search location.
            # BUG FIX: use add() so jurisdictions accumulate. The original called
            # set() with a single model instance, which expects an iterable and
            # would have replaced jurisdictions added in prior iterations.
            search_location.jurisdictions.add(search_jurisdiction)

    return UpsertSearchLocation(search_location=search_location)
def sample_user_state_with_search_locations_and_additional_scope_instances(
        user_scope_name, sample_user_state):
    """
    Merges userSearchLocations (and application-defined additional scope instance
    properties) into each userScope dict at data.[user_scope_name] of the sample user state.
    NOTE(review): `search_locations` and `create_additional_scope_instance_properties`
    are free variables captured from the enclosing scope — confirm at the definition site.
    :param user_scope_name: e.g. 'userRegions' or 'userProjects'
    :param sample_user_state: The sample user state dict to augment
    :return: The sample user state with data.[user_scope_name] updated
    """
    return R.fake_lens_path_set(
        f'data.{user_scope_name}'.split('.'),
        R.map(
            lambda user_scope: R.compose(
                # Gives applications a chance to add the needed additional scope instances,
                # e.g. userDesignFeatures
                lambda user_scope: create_additional_scope_instance_properties(user_scope),
                lambda user_scope: R.merge(
                    user_scope,
                    dict(userSearch=dict(userSearchLocations=R.map(
                        lambda i_search_location: dict(
                            # Just return with the id since the full data is in the database
                            searchLocation=R.pick(['id'], i_search_location[1]),
                            # Set the first search_location to active
                            activity=dict(isActive=i_search_location[0] == 0)),
                        enumerate(search_locations))))))(user_scope),
            R.item_str_path(f'data.{user_scope_name}', sample_user_state)),
        sample_user_state)
def quiz_model_mutation_update(client, graphql_update_or_create_function, create_path, update_path, values, update_values):
    """
    Tests an update mutation for a model by calling a create with the given values, then
    an update with the given update_values (plus the created id).
    :param client: The Apollo Client
    :param graphql_update_or_create_function: The update or create mutation function for the model.
    Expects client and input values
    :param create_path: The path to the result of the create in the data object (e.g. createRegion.region)
    :param update_path: The path to the result of the update in the data object (e.g. updateRegion.region)
    :param values: The input values to use for the create
    :param update_values: The input values to use for the update. This can be as little as one key value
    :return: The create result and the update result
    """
    def _assert_no_errors(response):
        # Fail with the formatted graphql errors when any are present
        assert not R.has('errors', response), R.dump_json(
            R.map(lambda e: format_error(e), R.prop('errors', response)))

    result = graphql_update_or_create_function(client, values=values)
    _assert_no_errors(result)
    # Extract the create result and underscore the graphql keys to match the python keys
    created = R.map_keys(
        lambda key: underscore(key),
        R.item_str_path(f'data.{create_path}', result))
    # Everything sent in values must come back in the created result
    assert values == pick_deep(created, values)

    # Build the update input: the created id, the key when one exists, then update_values
    merge_parts = [dict(id=created['id'])]
    if R.prop_or(False, 'key', created):
        merge_parts.append(dict(key=created['key']))
    merge_parts.append(update_values)
    update_result = graphql_update_or_create_function(client, R.merge_all(merge_parts))
    _assert_no_errors(update_result)

    updated = R.item_str_path(f'data.{update_path}', update_result)
    # The update must target the created instance and reflect the update values
    assert created['id'] == updated['id']
    assert update_values == pick_deep(update_values, updated)
    return result, update_result
def quiz_model_versioned_query(client, model_class, model_query, result_name,
                               version_count_expected, props, omit_props):
    """
    Tests a versioned query for a model with variables
    :param client: Apollo client
    :param model_class: Model class
    :param model_query: Model's query that should return one result (as a filter)
    :param result_name: The name of the results in data.[result_name].objects
    :param version_count_expected: The number of versions of the instance we expect
    :param props: The props to query to find a single instance. Should just be {id:...}
    :param omit_props: Props to omit from assertions because they are nondeterminate
    :return:
    """
    # Query the single instance's versions
    variables = dict(objects=R.to_array_if_not(dict(instance=props)))
    result = model_query(client, variables=variables)
    # Check against errors
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    # The number of returned version objects must match the expectation
    version_objects = R.item_str_path_or([], f'data.{result_name}.objects')(result)
    assert R.length(version_objects) == version_count_expected
def quiz_model_paginated_query(client, model_class, paginated_query, result_name,
                               page_count_expected, props, omit_props,
                               order_by=None, page_size=1):
    """
    Tests a pagination query for a model with variables
    :param client: Apollo client
    :param model_class: Model class
    :param paginated_query: Model's pagination query
    :param page_count_expected: The number of pages expected when the page_size is 1, in other
    words the number of items in the database that match props
    :param result_name: The name of the results in data.[result_name].objects
    :param props: The props to query, not including pagination
    :param omit_props: Props to omit from assertions because they are nondeterminate
    :param order_by: Order by page-level prop
    :param page_size: Default 1
    :return the first result (first page) and final result (last page) for further testing:
    """
    result = paginated_query(client,
                             variables=dict(page=1,
                                            page_size=page_size,
                                            order_by=order_by,
                                            objects=R.to_array_if_not(props)))
    # Check against errors
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    first_page_objects = R.item_path(['data', result_name, 'objects'], result)
    # Assert we got page_size results on the first page
    assert page_size == R.compose(
        R.length,
        R.map(R.omit(omit_props)),
    )(first_page_objects)
    # Ids of the matching instances that were NOT returned on the first page
    # NOTE(review): order_by.split(',') is called unconditionally, so despite the
    # default of None, callers apparently must pass order_by — confirm
    remaining_ids = list(
        set(
            R.map(
                R.prop('id'),
                model_class.objects.filter(*process_filter_kwargs(
                    model_class, **R.map_keys(underscore, props))).order_by(
                    *order_by.split(',')))) -
        set(R.map(R.compose(int, R.prop('id')), first_page_objects)))
    page_info = R.item_path(['data', result_name], result)
    # We have page_size pages so there should be a total number of pages
    # of what we specified for page_count_expected
    assert page_info['pages'] == page_count_expected
    assert page_info['hasNext'] == True
    assert page_info['hasPrev'] == False
    # Get the final page
    new_result = paginated_query(client,
                                 variables=dict(page=page_count_expected,
                                                page_size=page_info['pageSize'],
                                                order_by=order_by,
                                                objects=R.to_array_if_not(props)))
    # Make sure the new_result matches one of the remaining ids
    assert R.contains(
        R.item_path(['data', result_name, 'objects', 0, 'id'], new_result),
        remaining_ids)
    new_page_info = R.item_path(['data', result_name], new_result)
    # Still expect the same page count
    assert new_page_info['pages'] == page_count_expected
    # Make sure it's the last page
    assert new_page_info['hasNext'] == False
    assert new_page_info['hasPrev'] == True
    return [result, new_result]
def mutate(self, info, user_state_data=None):
    """
    Update or create the user state
    :param info: Graphene resolve info
    :param user_state_data: The UserState props; user_state_data['data'] holds the
    userRegions/userProjects scope data
    :return: UpsertUserState payload containing the mutated user_state
    """
    # Check that all the scope instances in user_state.data exist. We permit deleted instances for now.
    new_data = R.prop_or({}, 'data', user_state_data)
    # Copy since Graphene reuses this data
    copied_new_data = copy.deepcopy(new_data)
    old_user_state_data = UserState.objects.get(
        id=user_state_data['id']
    ).data if R.prop_or(None, 'id', user_state_data) else None

    # Inspect the data and find all scope instances within UserState.data
    # This includes userRegions[*].region, userProject[*].project and within userRegions and userProjects
    # userSearch.userSearchLocations[*].search_location and whatever the implementing libraries define
    # in addition
    updated_new_data = validate_and_mutate_scope_instances(
        user_state_scope_instances_config,
        copied_new_data
    )

    # If either userProjects or userRegions are null, it means those scope instances aren't part
    # of the update, so merge in the old values
    if R.prop_or(None, 'id', user_state_data) and R.any_satisfy(
            lambda user_scope_key: not R.prop_or(None, user_scope_key, updated_new_data),
            ['userProjects', 'userRegions']
    ):
        # The special update case where one userScope collection is null,
        # indicates that we are only updating one userScope object. The rest
        # should remain the same and not be removed
        for user_scope_key in ['userProjects', 'userRegions']:
            # Database values
            old_user_scopes_by_id = user_scope_instances_by_id(
                user_scope_key,
                old_user_state_data
            )
            # New values with updates applied
            new_user_scopes_by_id = user_scope_instances_by_id(
                user_scope_key,
                updated_new_data
            )
            # Prefer the old over the new, merging all objects but overriding lists
            # We override lists because a non-null list always replaces the old list in the database
            updated_new_data[user_scope_key] = R.values(R.merge_deep(
                old_user_scopes_by_id,
                new_user_scopes_by_id,
                MyMerger(
                    # pass in a list of tuples, with the
                    # strategies you are looking to apply
                    # to each type.
                    [
                        (list, ["override_non_null"]),
                        (dict, ["merge"])
                    ],
                    # next, choose the fallback strategies,
                    # applied to all other types:
                    ["override"],
                    # finally, choose the strategies in
                    # the case where the types conflict:
                    ["override"]
                )
            ))

    # Update user_state_data the updated data
    modified_user_state_data = R.merge(user_state_data, dict(data=updated_new_data))

    # id or user.id can be used to identify the existing instance
    id_props = R.compact_dict(
        dict(
            id=R.prop_or(None, 'id', modified_user_state_data),
            user_id=R.item_str_path_or(None, 'user.id', modified_user_state_data)
        )
    )

    def fetch_and_merge(modified_user_state_data, props):
        # Merge the existing instance's data fields into the incoming update
        existing = UserState.objects.filter(**props)
        # If the user doesn't have a user state yet
        if not R.length(existing):
            return modified_user_state_data

        return merge_data_fields_on_update(
            ['data'],
            R.head(existing),
            # Merge existing's id in case it wasn't in user_state_data
            R.merge(modified_user_state_data, R.pick(['id'], existing))
        )

    # Only fetch/merge when we can identify an existing instance by id_props
    modified_data = R.if_else(
        R.compose(R.length, R.keys),
        lambda props: fetch_and_merge(modified_user_state_data, props),
        lambda _: modified_user_state_data
    )(id_props)

    update_or_create_values = input_type_parameters_for_update_or_create(
        user_state_fields,
        # Make sure that all props are unique that must be, either by modifying values or erring.
        enforce_unique_props(
            user_state_fields,
            modified_data)
    )

    user_state, created = update_or_create_with_revision(UserState, update_or_create_values)
    return UpsertUserState(user_state=user_state)
def create_sample_user_states(
        cls, region_cls, project_cls, location_cls, search_location_cls,
        create_sample_locations=create_local_sample_locations,
        create_sample_search_locations=create_local_sample_search_locations,
        create_additional_scope_instance_properties=lambda user_scope_instance: user_scope_instance):
    """
    Creates sample users, regions, projects, locations, and search locations, then persists a
    sample UserState for each sample user referencing those scope instances.
    :param cls: The UserState class
    :param region_cls:
    :param project_cls:
    :param location_cls:
    :param search_location_cls:
    :param create_sample_locations: Defaults to create_local_sample_locations. Expects the
    location_cls as the only arg
    :param create_sample_search_locations: Defaults to create_local_sample_search_locations.
    Expects the search_location_cls and a list of sample locations. The locations can be
    ignored if creating samples independent of the locations
    :param create_additional_scope_instance_properties: Function that takes each
    user_scope_instance and adds properties to it if needed. This corresponds with schemas
    defined by users in additional_user_scope_schemas
    :return: The persisted UserState instances
    """
    users = create_sample_users()
    # Create regions for the users to associate with. A region also needs and owner so we pass users to the function
    regions = create_sample_regions(region_cls)
    projects = create_sample_projects(project_cls, users, regions)
    locations = create_sample_locations(location_cls)
    search_locations = create_sample_search_locations(search_location_cls, locations)
    # Assign all the locations to each project
    for project in projects:
        project.locations.add(*locations)

    # Merge search_locations into each userScope dict
    def sample_user_state_with_search_locations_and_additional_scope_instances(
            user_scope_name, sample_user_state):
        # Adds userSearch.userSearchLocations and additional application-defined
        # scope instance properties to each userScope at data.[user_scope_name]
        return R.fake_lens_path_set(
            f'data.{user_scope_name}'.split('.'),
            R.map(
                lambda user_scope: R.compose(
                    # Gives applications a chance to add the needed additional scope instances,
                    # e.g. userDesignFeatures
                    lambda user_scope: create_additional_scope_instance_properties(user_scope),
                    lambda user_scope: R.merge(
                        user_scope,
                        dict(userSearch=dict(userSearchLocations=R.map(
                            lambda i_search_location: dict(
                                # Just return with the id since the full data is in the database
                                searchLocation=R.pick(['id'], i_search_location[1]),
                                # Set the first search_location to active
                                activity=dict(isActive=i_search_location[0] == 0)),
                            enumerate(search_locations))))))(user_scope),
                R.item_str_path(f'data.{user_scope_name}', sample_user_state)),
            sample_user_state)

    # Convert all sample user_state dicts to persisted UserState instances
    # Use the username to match a real user
    user_states = R.map(
        lambda sample_user_state: create_sample_user_state(
            cls, regions, projects, sample_user_state),
        # Adds search_locations to each userState.data.[userRegions[*]|userProjects[*]].user_search.userSearchLocations
        R.compose(
            lambda sample_user_states: R.map(
                lambda sample_user_state: sample_user_state_with_search_locations_and_additional_scope_instances(
                    'userProjects', sample_user_state),
                sample_user_states),
            lambda sample_user_states: R.map(
                lambda sample_user_state: sample_user_state_with_search_locations_and_additional_scope_instances(
                    'userRegions', sample_user_state),
                sample_user_states),
        )(sample_user_states))
    return user_states
def handle_can_mutate_related(model, related_model_scope_config, data, validated_scope_objs_instances_and_ids):
    """
    Mutates the given related models of an instance if permitted
    See rescape-region's UserState for a working usage
    :param model: The related model
    :param related_model_scope_config: Configuration of the related model relative to the
    referencing instance
    :param data: The data containing the related models dicts to possibly mutate with
    :param validated_scope_objs_instances_and_ids: Config of the related objects that have been
    validated as existing in the database for objects not being created
    :return: Possibly mutates instances, returns data with newly created ids set
    """

    def make_fields_unique_if_needed(scope_obj):
        # If a field needs to be unique, like a key, call its unique_with method
        return R.map_with_obj(
            lambda key, value: R.item_str_path_or(
                R.identity, f'field_config.{key}.unique_with', related_model_scope_config)(scope_obj),
            scope_obj)

    def convert_foreign_key_to_id(scope_obj):
        # Find ForeignKey attributes and map the class field name to the foreign key id field
        # E.g. region to region_id, user to user_id, etc
        converters = R.compose(
            R.from_pairs,
            R.map(lambda field: [field.name, field.attname]),
            R.filter(lambda field: R.isinstance(ForeignKey, field)))(
            model._meta.fields)
        # Convert scope_obj[related_field] = {id: x} to scope_obj[related_field_id] = x
        return R.from_pairs(
            R.map_with_obj_to_values(
                lambda key, value: [converters[key], R.prop('id', value)]
                if R.has(key, converters)
                else [key, value],
                scope_obj))

    def omit_to_many(scope_obj):
        # Many-to-many values cannot be passed to the model constructor; they are set after save
        return R.omit(R.map(R.prop('attname'), model._meta.many_to_many), scope_obj)

    # This indicates that scope_objs were submitted that didn't have ids
    # This is allowed if those scope_objs can be created/updated when the userState is mutated
    if R.prop_or(False, 'can_mutate_related', related_model_scope_config):
        for scope_obj_key_value in validated_scope_objs_instances_and_ids['scope_objs']:
            scope_obj = scope_obj_key_value['value']
            scope_obj_path = scope_obj_key_value['key']
            if R.length(R.keys(R.omit(['id'], scope_obj))):
                modified_scope_obj = R.compose(
                    convert_foreign_key_to_id,
                    omit_to_many,
                    make_fields_unique_if_needed)(scope_obj)
                if R.prop_or(False, 'id', scope_obj):
                    # Update, we don't need the result since it's already in user_state.data
                    instance, created = model.objects.update_or_create(
                        defaults=R.omit(['id'], modified_scope_obj),
                        **R.pick(['id'], scope_obj))
                else:
                    # Create
                    instance = model(**modified_scope_obj)
                    instance.save()
                    # We need to replace the object
                    # passed in with an object containing the id of the instance
                    data = R.fake_lens_path_set(
                        scope_obj_path.split('.'),
                        R.pick(['id'], instance),
                        data)
                # NOTE(review): applies to both the update and create branches above since
                # both bind `instance` — confirm against the original formatting
                for to_many in model._meta.many_to_many:
                    if to_many.attname in R.keys(scope_obj):
                        # Set existing related values to the created/updated instances
                        getattr(instance, to_many.attname).set(
                            R.map(R.prop('id'), scope_obj[to_many.attname]))
    return data