def mutate(self, info, resource_data=None):
    """
    Upsert a Resource from the given input data.

    :param info: Graphene resolve info
    :param resource_data: Input dict for the Resource; may contain an 'id' for updates
    :return: UpsertResource wrapping the created or updated Resource
    """
    is_update = R.has('id', resource_data)
    if is_update:
        # Updating: deep-merge the incoming data over the persisted resource.data,
        # giving priority to the new values
        persisted_data = Resource.objects.get(id=resource_data['id']).data
        resource_data['data'] = R.merge_deep(
            persisted_data,
            R.prop_or({}, 'data', resource_data)
        )
    # Enforce uniqueness constraints, either by modifying values or erring
    unique_resource_data = enforce_unique_props(resource_fields, resource_data)
    upsert_params = input_type_parameters_for_update_or_create(
        resource_fields,
        unique_resource_data
    )
    # Derive the sankey graph into data.graph unless this update doesn't touch defaults.
    # We could compute this derived data on the client instead, but it's easy enough here.
    if R.has('defaults', upsert_params):
        upsert_params = R.merge(
            upsert_params,
            dict(defaults=add_sankey_graph_to_resource_dict(upsert_params['defaults']))
        )
    resource, _ = update_or_create_with_revision(Resource, upsert_params)
    return UpsertResource(resource=resource)
def mutate(self, info, settings_data=None):
    """
    Upsert a Settings instance from the given input data.

    :param info: Graphene resolve info
    :param settings_data: Input dict for Settings; may contain 'id' and 'data' for updates
    :return: UpsertSettings wrapping the created or updated Settings
    """
    updating_data = R.has('id', settings_data) and R.has('data', settings_data)
    if updating_data:
        # Deep-merge the new data over the persisted settings.data;
        # new values win on conflicts
        existing = Settings.objects.get(id=settings_data['id'])
        settings_data['data'] = R.merge_deep(existing.data, settings_data['data'])
    # Resolve uniqueness constraints, either by modifying values or erring
    unique_settings_data = enforce_unique_props(settings_fields, settings_data)
    upsert_params = input_type_parameters_for_update_or_create(
        settings_fields,
        unique_settings_data
    )
    settings, _ = update_or_create_with_revision(Settings, upsert_params)
    return UpsertSettings(settings=settings)
def mutate(self, info, location_data=None):
    """
    Upsert or delete a Location from the given input data, atomically.

    :param info: Graphene resolve info
    :param location_data: Input dict for the Location; may contain 'id' and 'data'
        for updates, or a delete marker handled by delete_if_marked_for_delete
    :return: UpsertLocation wrapping the deleted, created, or updated Location
    """
    with transaction.atomic():
        # Short-circuit if the input marks the instance for deletion
        delete_response = delete_if_marked_for_delete(
            Location, UpsertLocation, 'location', location_data
        )
        if delete_response:
            return delete_response

        if R.has('id', location_data) and R.has('data', location_data):
            # Updating: deep-merge new data over the persisted location.data,
            # new values taking priority
            persisted = Location.objects.get(id=location_data['id'])
            location_data['data'] = R.merge_deep(persisted.data, location_data['data'])

        # Resolve uniqueness constraints, either by modifying values or erring
        unique_location_data = enforce_unique_props(location_fields, location_data)
        upsert_params = input_type_parameters_for_update_or_create(
            location_fields,
            unique_location_data
        )
        location, _ = update_or_create_with_revision(Location, upsert_params)
        return UpsertLocation(location=location)
def create_sample_user_state(cls, regions, projects, user_state_dict):
    """
    Persists sample user state data into a UserState

    :param cls: The UserState class
    :param regions: Persisted sample regions; region keys in the sample data are
        resolved to these instances' ids
    :param projects: Persisted sample projects
    :param user_state_dict: Sample data in the form:
        dict(
            username="...",  # This is mapped to the User id below
            data=dict(
                userRegions=[
                    dict(
                        region=dict(key='belgium'),  # key is converted to the persisted Region's id
                        mapbox=dict(viewport=dict(
                            latitude=50.5915,
                            longitude=2.0165,
                            zoom=7
                        )),
                    )
                ]
            )
        )
    :return: The persisted UserState
    """
    # Resolve the sample username to the persisted User instance
    user = get_user_model().objects.get(username=user_state_dict['username'])
    user_state_values = R.merge_deep(
        # Skip username and data; they are handled above and below
        R.omit(['username', 'data'], user_state_dict),
        # Convert data.region_keys to data.user_region ids
        dict(
            user=user,
            data=form_sample_user_state_data(
                regions,
                projects,
                R.prop('data', user_state_dict)
            )
        )
    )
    # Save the user_state with the complete data
    user_state = cls(**user_state_values)
    user_state.save()
    return user_state
def mutate(self, info, user_state_data=None):
    """
    Update or create the user state

    :param info: Graphene resolve info
    :param user_state_data: Input dict for the UserState; may contain 'id' or
        'user.id' to identify an existing instance
    :return: UpsertUserState wrapping the created or updated UserState
    """
    # Check that all the scope instances in user_state.data exist.
    # We permit deleted instances for now.
    incoming_data = R.prop_or({}, 'data', user_state_data)
    # Copy since Graphene reuses this data
    incoming_data_copy = copy.deepcopy(incoming_data)
    has_id = R.prop_or(None, 'id', user_state_data)
    old_user_state_data = (
        UserState.objects.get(id=user_state_data['id']).data if has_id else None
    )

    # Inspect the data and find all scope instances within UserState.data.
    # This includes userRegions[*].region, userProject[*].project and within
    # userRegions and userProjects userSearch.userSearchLocations[*].search_location
    # and whatever the implementing libraries define in addition
    updated_new_data = validate_and_mutate_scope_instances(
        user_state_scope_instances_config,
        incoming_data_copy
    )

    # If either userProjects or userRegions are null, those scope instances aren't
    # part of the update, so merge in the old values
    scope_keys = ['userProjects', 'userRegions']
    any_scope_missing = R.any_satisfy(
        lambda scope_key: not R.prop_or(None, scope_key, updated_new_data),
        scope_keys
    )
    if has_id and any_scope_missing:
        # The special update case where one userScope collection is null indicates
        # that we are only updating one userScope object. The rest should remain
        # the same and not be removed
        for scope_key in scope_keys:
            # Database values
            old_scopes_by_id = user_scope_instances_by_id(scope_key, old_user_state_data)
            # New values with updates applied
            new_scopes_by_id = user_scope_instances_by_id(scope_key, updated_new_data)
            # Prefer the old over the new, merging all objects but overriding lists.
            # We override lists because a non-null list always replaces the old list
            # in the database
            updated_new_data[scope_key] = R.values(R.merge_deep(
                old_scopes_by_id,
                new_scopes_by_id,
                MyMerger(
                    # pass in a list of tuples, with the strategies you are
                    # looking to apply to each type
                    [
                        (list, ["override_non_null"]),
                        (dict, ["merge"])
                    ],
                    # next, choose the fallback strategies, applied to all other types
                    ["override"],
                    # finally, choose the strategies in the case where the types conflict
                    ["override"]
                )
            ))

    # Update user_state_data with the updated data
    modified_user_state_data = R.merge(user_state_data, dict(data=updated_new_data))

    # id or user.id can be used to identify the existing instance
    id_props = R.compact_dict(
        dict(
            id=R.prop_or(None, 'id', modified_user_state_data),
            user_id=R.item_str_path_or(None, 'user.id', modified_user_state_data)
        )
    )

    def fetch_and_merge(modified_user_state_data, props):
        existing = UserState.objects.filter(**props)
        # If the user doesn't have a user state yet
        if not R.length(existing):
            return modified_user_state_data
        return merge_data_fields_on_update(
            ['data'],
            R.head(existing),
            # Merge existing's id in case it wasn't in user_state_data
            R.merge(modified_user_state_data, R.pick(['id'], existing))
        )

    modified_data = R.if_else(
        R.compose(R.length, R.keys),
        lambda props: fetch_and_merge(modified_user_state_data, props),
        lambda _: modified_user_state_data
    )(id_props)

    update_or_create_values = input_type_parameters_for_update_or_create(
        user_state_fields,
        # Make sure that all props are unique that must be, either by modifying
        # values or erring
        enforce_unique_props(user_state_fields, modified_data)
    )
    user_state, created = update_or_create_with_revision(UserState, update_or_create_values)
    return UpsertUserState(user_state=user_state)
dict(key='source', name='Source', targets=['conversion']), dict(key='conversion', name='Conversion', targets=['distribution']), dict(key='distribution', name='Distribution', targets=['demand']), dict(key='demand', name='Demand', targets=['reconversion', 'sink']), dict(key='reconversion', name='Reconversion', targets=['demand']), dict(key='sink', name='Sink', targets=[]) ] ) ) # Map the settings to merge it into each resource # Settings are stored in resource.data.settings sample_resources = R.map( lambda resource_data: R.merge_deep( resource_data, dict( data=sample_settings ) ), [ dict( name='Minerals', data=dict( material='Minerals', raw_data=[ 'Other Global Imports;Shipments, location generalized;51.309933, 3.055030;Source;22,469,843', 'Knauf (Danilith) BE;Waregemseweg 156-142 9790 Wortegem-Petegem, Belgium;50.864762, 3.479308;Conversion;657,245', "MPRO Bruxelles;Avenue du Port 67 1000 Bruxelles, Belgium;50.867486, 4.352543;Distribution;18,632", 'Residential Buildings (all typologies);Everywhere in Brussels;NA;Demand;3,882,735', 'Duplex House Typology;Everywhere in Brussels;NA;Demand;13,544', 'Apartment Building Typology;Everywhere in Brussels;NA;Demand;34,643', 'New West Gypsum Recycling;9130 Beveren, Sint-Jansweg 9 Haven 1602, Kallo, Belgium;51.270229, 4.261048;Reconversion;87,565',