class UserStateQuery(ObjectType):
    user_states = graphene.List(
        R.prop('graphene_class', user_state_config),
        **top_level_allowed_filter_arguments(R.prop('graphene_fields', user_state_config),
                                             R.prop('graphene_class', user_state_config))
    )

    @login_required
    def resolve_user_states(self, info, **kwargs):
        """
            Resolves only the user_state of the current user. If the user is_staff or is_superuser
            then the user id will not be passed implicitly to the query
        :param info:
        :param kwargs:
        :return:
        """
        context = info.context
        user = R.prop_or(None, 'user', context)
        admin = user.is_staff or user.is_superuser

        q_expressions = process_filter_kwargs(
            UserState,
            **R.merge_all([
                dict(deleted__isnull=True),
                kwargs,
                dict(user__id=user.id) if not admin else {}
            ])
        )

        return UserState.objects.filter(
            *q_expressions
        )
def test_query(self):
    all_result = graphql_query_resources(self.client)
    assert not R.has('errors', all_result), R.dump_json(R.prop('errors', all_result))
    results = graphql_query_resources(self.client, dict(name='String'),
                                      variable_values=dict(name='Minerals'))
    # Check against errors
    assert not R.has('errors', results), R.dump_json(R.prop('errors', results))
    assert 1 == R.length(R.item_path(['data', 'resources'], results))
def quiz_model_query(client, model_query_function, result_name, variables, expect_length=1):
    """
        Tests a query for a model with variables that produce exactly one result
    :param client: Apollo client
    :param model_query_function: Query function expecting the client and variables
    :param result_name: The name of the result object in the data object
    :param variables: key value variables for the query
    :param expect_length: Default 1. Optional number of items to expect
    :return: returns the result for further assertions
    """
    all_result = model_query_function(client)
    assert not R.has('errors', all_result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', all_result)))
    result = model_query_function(client, variables=variables)
    # Check against errors
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    # Simple assertion that the query looks good
    assert expect_length == R.length(R.item_path(['data', result_name], result))
    return result
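# A hypothetical use of quiz_model_query with a non-default expect_length. The
# graphql_query_regions helper and the nameContains filter are assumptions modeled on the
# surrounding tests, not names confirmed by this file.
regions_result = quiz_model_query(
    self.client,
    graphql_query_regions,
    'regions',
    dict(nameContains='Belgium'),
    expect_length=2
)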
def log_request_body(info, response_or_error):
    body = info.context._body.decode('utf-8')
    try:
        json_body = json.loads(body)
        (
            logger.error if isinstance(response_or_error, ErrorType) else logger.debug
        )(
            f" User: {info.context.user} \n Action: {json_body['operationName']} \n Variables: {json_body['variables']} \n Body: {json_body['query']}"
        )
        if hasattr(response_or_error, '_meta') and isinstance(response_or_error._meta, MutationOptions):
            # Just log top level types
            if isinstance(response_or_error, Model):
                mutation_response = json.dumps(
                    R.omit(['_state'], response_or_error.__dict__),
                    sort_keys=True,
                    indent=1,
                    cls=MyDjangoJSONEncoder
                )
                logger.debug(f'Mutation returned {mutation_response}')
            elif isinstance(response_or_error, BaseType):
                try:
                    mutation_response = json.dumps(
                        R.omit(['_state'], response_or_error.__dict__),
                        sort_keys=True,
                        indent=1,
                    )
                    logger.debug(f'Mutation returned {mutation_response}')
                except Exception:
                    logger.debug(f'Mutation returned {response_or_error.__class__}')
        else:
            if hasattr(response_or_error, 'objects'):
                count = response_or_error.objects.count()
                # Log up to 100 ids. Don't log a larger set because it might be a paging query
                ids = R.join(' ', [
                    '',
                    'having ids:',
                    R.join(', ', R.map(R.prop("id"), response_or_error.objects.values('id')))
                ]) if count < 100 else ""
                logger.debug(
                    f'Paginated Query Page {response_or_error.page} of page size {response_or_error.page_size} out of total pages {response_or_error.pages} returned {count} results{ids}'
                )
            elif hasattr(response_or_error, 'count'):
                count = response_or_error.count()
                # Log up to 100 ids. Don't log a larger set because it might be a paging query
                ids = R.join(' ', [
                    '',
                    'having ids:',
                    R.join(', ', R.map(R.prop("id"), response_or_error.values('id')))
                ]) if count < 100 else ""
                logger.debug(f'Query returned {count} results{ids}')
            else:
                id = R.prop('id', response_or_error)
                logger.debug(f'Query returned single result {id}')
    except Exception:
        logger.error(body)
def assert_no_errors(result):
    """
        Assert no graphql request errors
    :param result: The request Result
    :return: None
    """
    assert not (R.prop_or(False, 'errors', result) and R.prop('errors', result)), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
class GroupStateQuery(ObjectType):
    group_states = graphene.List(
        R.prop('graphene_class', group_state_config),
        **top_level_allowed_filter_arguments(R.prop('graphene_fields', group_state_config),
                                             R.prop('graphene_class', group_state_config))
    )

    @login_required
    def resolve_group_states(self, info, **kwargs):
        q_expressions = process_filter_kwargs(GroupState, **R.merge(dict(deleted__isnull=True), kwargs))

        return R.prop('model_class', group_state_config).objects.filter(
            *q_expressions
        )
def index_sankey_graph(graph):
    """
        Once all nodes are generated for a sankey graph, the nodes need indices.
        This updates each node with an index property. Links also need the node indices,
        so each link gets source and target based on its source_node and target_node
    :param graph:
    :return: Updates graph.nodes, adding an index to each
    """
    nodes = R.prop('nodes', graph)
    for (i, node) in enumerate(nodes):
        node['index'] = i
    for link in R.prop('links', graph):
        link['source'] = nodes.index(R.prop('source_node', link))
        link['target'] = nodes.index(R.prop('target_node', link))
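# A minimal, self-contained sketch of index_sankey_graph in action. The node and link
# shapes are illustrative assumptions; real sankey nodes carry more properties.
a, b = dict(name='a'), dict(name='b')
graph = dict(nodes=[a, b], links=[dict(source_node=a, target_node=b, value=1)])
index_sankey_graph(graph)
# Each node now carries its index, and each link references its nodes by index
assert graph['nodes'][0]['index'] == 0 and graph['nodes'][1]['index'] == 1
assert graph['links'][0]['source'] == 0 and graph['links'][0]['target'] == 1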
def test_update(self):
    values = dict(username="******", firstName='T', lastName='Rex',
                  password=make_password("rrrrhhh", salt='not_random'))
    # Here is our create
    create_result = graphql_update_or_create_user(self.client, values)
    id = R.prop('id', R.item_path(['data', 'createUser', 'user'], create_result))
    # Here is our update
    result = graphql_update_or_create_user(
        self.client,
        dict(id=id, firstName='Al', lastName="Lissaurus")
    )
    assert not R.prop('errors', result), R.dump_json(R.map(lambda e: format_error(e), R.prop('errors', result)))
    self.assertMatchSnapshot(R.omit_deep(omit_props, R.item_path(['data', 'updateUser', 'user'], result)))
def find_scope_instances(user_state_scope, new_data):
    """
        Retrieve the scope instances to verify the Ids.
        Scope instances must have ids unless they are allowed to be created/updated
        during the userState mutation (such as searchLocations)
    :param new_data: The data to search
    :param user_state_scope: Dict with 'pick' in the shape of the instances we are looking for in new_data,
    e.g. dict(userRegions={region: True}) to search new_data.userRegions[] for all occurrences of {region:...},
    and 'key' which indicates the actual key of the instance (e.g. 'region' for regions)
    :return: dict(
        instances=Instances actually in the database,
    )
    """

    def until(key, value):
        return key != R.prop('key', user_state_scope)

    return R.compose(
        lambda scope_dict: dict(
            # See which instances with ids are actually in the database
            # If any are missing we have an invalid update or need to create those instances if permitted
            instances=list(
                find_scope_instances_by_id(R.prop('model', user_state_scope), scope_dict['scope_ids'])
            ),
            # The path from userRegions or userProjects to the scope instances, used to replace
            # a null update value with the existing values
            user_scope_path=list(R.keys(R.flatten_dct(user_state_scope, '.')))[0],
            **scope_dict
        ),
        lambda scope_objs: dict(
            # Unique by id or accept if there is no id. This loses data, but it's just for validation
            scope_objs=R.unique_by(lambda obj: R.prop_or(str(now()), 'id', obj['value']), scope_objs),
            scope_ids=R.unique_by(
                R.identity,
                compact(
                    R.map(lambda scope_obj: R.prop_or(None, 'id', scope_obj['value']), scope_objs)
                )
            )
        ),
        # Use the pick key property to find the scope instances in the data
        # If we don't match anything we can get null or an empty item. Filter/compact these out
        R.filter(lambda obj: obj['value'] and (not isinstance(obj['value'], list) or R.length(obj['value']) != 0)),
        R.map(lambda pair: dict(key=pair[0], value=pair[1])),
        lambda flattened_data: R.to_pairs(flattened_data),
        lambda data: R.flatten_dct_until(
            R.pick_deep_all_array_items(R.prop('pick', user_state_scope), data),
            until,
            '.'
        )
    )(new_data)
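# A hypothetical user_state_scope argument for find_scope_instances, modeled on the
# userRegions example in the docstring above. The Region model value is an assumption.
user_state_scope = dict(
    # Search new_data.userRegions[] for every {region: ...} occurrence
    pick=dict(userRegions=dict(region=True)),
    # 'region' is the actual key of each instance
    key='region',
    model=Region
)
# find_scope_instances(user_state_scope, dict(userRegions=[dict(region=dict(id=1))]))
# returns the Region instances found for id 1 in 'instances', plus scope_objs, scope_ids,
# and user_scope_path='userRegions' for validation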
def resolver_for_dict_list(resource, context, **kwargs):
    """
        Resolver for the data field that is a list. This extracts the desired json fields from the context
        and creates a tuple of the field values. Graphene has no built in way for drilling into json types.
        The property value must be a list or null. Null values will return null, list values will be
        processed in turn by graphene
    :param resource:
    :param context:
    :param kwargs: Arguments to filter with
    :return:
    """
    selections = resolve_selections(context)
    field_name = context.field_name
    # Value defaults to None. Empty is not the same as None
    value = R.prop(field_name, resource) if R.has(field_name, resource) else None

    return R.map(
        lambda data: pick_selections(selections, data),
        R.filter(
            # We only let this value through if it matches the kwargs
            # TODO data doesn't include full values for embedded model values, rather just {id: ...}. So if kwargs have
            # searches on other values of the model this will fail. The solution is to load the model values, but I
            # need some way to figure out where they are in data
            lambda data: R.dict_matches_params_deep(kwargs, data),
            value
        )
    ) if value else value
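# An illustrative sketch of resolver_for_dict_list's filtering, assuming a json 'data' field
# whose value is a list. The field values shown are hypothetical.
# resource.data = [dict(friend=dict(id=2), name='cat'), dict(friend=dict(id=3), name='dog')]
# resolver_for_dict_list(resource, context, friend=dict(id=2))
# -> [dict(friend=dict(id=2), name='cat')]
# (assuming both friend and name are selected in the graphql query)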
def test_query(self):
    quiz_model_query(
        self.client,
        user_schema.graphql_query_users,
        'users',
        dict(id=R.prop('id', self.user))
    )
def test_query(self):
    quiz_model_query(
        self.client,
        R.prop('graphql_query', self.user_state_schema),
        'userStates',
        # The query always queries by the current user
        dict()
    )
def test_query_foo_with_null_geojson(self):
    # Query for foos by key
    foo_results = graphql_query_foos(self.client, variables=dict(key='fookit'))
    assert not R.prop('errors', foo_results), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', foo_results)))
    assert 1 == R.length(R.map(R.omit_deep(omit_props), R.item_path(['data', 'foos'], foo_results)))
def test_create(self):
    values = dict(
        name='Candy',
        region=dict(id=R.head(self.regions).id),
        data=R.merge(
            sample_settings,
            dict(
                material='Candy',
                raw_data=[
                    'Other Global Imports;Shipments, location generalized;51.309933, 3.055030;Source;22,469,843',
                    'Knauf (Danilith) BE;Waregemseweg 156-142 9790 Wortegem-Petegem, Belgium;50.864762, 3.479308;Conversion;657,245',
                    "MPRO Bruxelles;Avenue du Port 67 1000 Bruxelles, Belgium;50.867486, 4.352543;Distribution;18,632",
                    'Residential Buildings (all typologies);Everywhere in Brussels;NA;Demand;3,882,735',
                    'Duplex House Typology;Everywhere in Brussels;NA;Demand;13,544',
                    'Apartment Building Typology;Everywhere in Brussels;NA;Demand;34,643',
                    'New West Gypsum Recycling;9130 Beveren, Sint-Jansweg 9 Haven 1602, Kallo, Belgium;51.270229, 4.261048;Reconversion;87,565',
                    'Residential Buildings (all typologies);Everywhere in Brussels;NA;Sink;120,000',
                    'RecyPark South;1190 Forest, Belgium;50.810799, 4.314789;Sink;3,130',
                    'RecyPark Nord;Rue du Rupel, 1000 Bruxelles, Belgium;50.880181, 4.377136;Sink;1,162'
                ]
            )
        )
    )
    result = graphql_update_or_create_resource(self.client, values)
    dump_errors(result)
    assert not R.has('errors', result), R.dump_json(R.prop('errors', result))
    # Look at the resource added and omit the non-determinant props
    result_path_partial = R.item_path(['data', 'createResource', 'resource'])
    self.assertMatchSnapshot(R.omit(omit_props, result_path_partial(result)))
def test_update(self):
    # First add a new User
    margay = dict(username="******", first_name='Upa', last_name='Tree',
                  password=make_password("merowgir", salt='not_random'))
    user = create_sample_user(margay)

    # Now assign regions and persist the UserState
    sample_user_state_data = dict(
        user=dict(id=user.id),
        data=form_sample_user_state_data(
            self.regions,
            self.projects,
            dict(
                userRegions=[
                    dict(
                        # Assign the first region
                        region=dict(key=R.prop('key', R.head(self.regions))),
                        mapbox=dict(viewport=dict(latitude=50.5915, longitude=2.0165, zoom=7))
                    )
                ],
                userProjects=[
                    dict(
                        # Assign the first project
                        project=dict(key=R.prop('key', R.head(self.projects))),
                        mapbox=dict(viewport=dict(latitude=50.5915, longitude=2.0165, zoom=7))
                    )
                ]
            )
        )
    )

    # Update the zoom of the first userRegion
    update_data = deepcopy(R.pick(['data'], sample_user_state_data))
    R.item_str_path(
        'mapbox.viewport',
        R.head(R.item_str_path('data.userRegions', update_data))
    )['zoom'] = 15

    result, update_result = quiz_model_mutation_update(
        self.client,
        R.prop('graphql_mutation', self.user_state_schema),
        'createUserState.userState',
        'updateUserState.userState',
        sample_user_state_data,
        update_data
    )
    versions = Version.objects.get_for_object(
        UserState.objects.get(id=R.item_str_path('data.updateUserState.userState.id', update_result))
    )
    assert len(versions) == 2
def mutate(self, info, foo_data=None):
    modified_foo_data = R.merge(
        # Make sure unique fields are enforced, here by incrementing foo.key
        enforce_unique_props(foo_fields, foo_data),
        dict(
            # Force the FeatureCollection geojson into the GEOSGeometryCollection. This is just Geometry
            geo_collection=ewkt_from_feature_collection(foo_data['geojson']) if R.prop('geojson', foo_data) else {},
            # Put the full FeatureCollection geojson into the geojson field.
            geojson=foo_data['geojson'] if R.prop('geojson', foo_data) else {}
        )
    )
    update_or_create_values = input_type_parameters_for_update_or_create(foo_fields, modified_foo_data)
    foo, created = update_or_create_with_revision(Foo, update_or_create_values)
    return UpsertFoo(foo=foo)
def test_create_user(self):
    values = dict(username="******", firstName='T', lastName='Rex',
                  password=make_password("rrrrhhh", salt='not_random'))
    result = graphql_update_or_create_user(self.client, values)
    assert not R.prop('errors', result), R.dump_json(R.map(lambda e: format_error(e), R.prop('errors', result)))
    # Look at the user added and omit the non-determinant values
    self.assertMatchSnapshot(
        R.omit_deep(omit_props, R.item_path(['data', 'createUser', 'user'], result)))
def create_sample_resource(region, resource_dict):
    # Generate our sample resources, computing and storing their Sankey graph data
    graph = generate_sankey_data(resource_dict)
    data = R.merge(R.prop('data', resource_dict), dict(graph=graph))
    # Save the resource with the complete data
    resource = Resource(**R.merge(resource_dict, dict(region=region, data=data)))
    resource.save()
    return resource
def quiz_model_mutation_create(client, graphql_update_or_create_function, result_path, values,
                               second_create_results=None, second_create_does_update=False):
    """
        Tests a create mutation for a model
    :param client: The Apollo Client
    :param graphql_update_or_create_function: The update or create mutation function for the model.
    Expects client and input values
    :param result_path: The path to the result of the create in the data object (e.g. createRegion.region)
    :param values: The input values to use for the create
    :param second_create_results: Object, tests a second create if specified. Use to make sure that a create
    with the same values creates a new instance or updates, depending on what you expect it to do.
    The values of this should be regexes that match the created instance
    :param second_create_does_update: Default False. If True expects a second create with the same values to
    update rather than create a new instance
    :return: Tuple with two return values. The second is null if second_create_results is False
    """
    result = graphql_update_or_create_function(client, values=values)
    result_path_partial = R.item_str_path(f'data.{result_path}')
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    # Get the created value, using underscore to make the camelcase keys match python keys
    created = R.map_keys(lambda key: underscore(key), result_path_partial(result))
    # Get all the keys in values that are in created. This should match values if created has everything we expect
    assert values == pick_deep(created, values)
    # Try creating with the same values again. Either unique constraints will force a new create
    # or an update will occur
    if second_create_results:
        new_result = graphql_update_or_create_function(client, values)
        assert not R.has('errors', new_result), R.dump_json(
            R.map(lambda e: format_error(e), R.prop('errors', new_result)))
        created_too = result_path_partial(new_result)
        if second_create_does_update:
            assert created['id'] == created_too['id']
        else:
            assert created['id'] != created_too['id']
        for path, value in R.flatten_dct(second_create_results, '.').items():
            assert re.match(value, R.item_str_path_or(None, path, created_too))
    else:
        new_result = None
    return result, new_result
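# A hypothetical invocation of quiz_model_mutation_create. The graphql_update_or_create_region
# helper and the 'testRegion' values are assumptions modeled on the createRegion.region example
# in the docstring.
result, second_result = quiz_model_mutation_create(
    self.client,
    graphql_update_or_create_region,
    'createRegion.region',
    dict(key='testRegion', name='Test Region'),
    # Expect a second create with the same values to produce a new instance whose key
    # was incremented for uniqueness, e.g. 'testRegion1'
    second_create_results=dict(key=r'testRegion\d+'),
    second_create_does_update=False
)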
def handleGrapheneTypes(key, value):
    """
        Handle related Graphene types. This is recursive since it calls dump_graphql_keys
    :param key:
    :param value:
    :return:
    """
    return '''%s {
        %s
    }''' % (camelize(key, False), dump_graphql_keys(call_if_lambda(R.prop('fields', value))))
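# An illustrative use of handleGrapheneTypes inside a query builder. The field config here is
# a hypothetical example whose 'fields' dump to 'id' and 'mapbox'; camelize turns the
# snake_case key into the graphql field name.
selection = handleGrapheneTypes('user_region', user_region_field_config)
# selection is a nested GraphQL selection string such as:
# userRegion {
#     id
#     mapbox
# }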
def test_query(self):
    user_results = graphql_query_users(self.client)
    assert not R.prop('errors', user_results), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', user_results)))
    assert 2 == R.length(R.map(R.omit_deep(omit_props), R.item_path(['data', 'users'], user_results)))

    # Query for foos based on the related User
    foo_results = graphql_query_foos(
        self.client,
        variables=dict(
            user=R.pick(['id'], self.lion.__dict__),
            # Test filters
            name_contains='oo',
            name_contains_not='jaberwaki'
        )
    )
    assert not R.prop('errors', foo_results), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', foo_results)))
    assert 1 == R.length(R.map(R.omit_deep(omit_props), R.item_path(['data', 'foos'], foo_results)))
    # Make sure the Django instance in the json blob was resolved
    assert self.cat.id == R.item_path(['data', 'foos', 0, 'data', 'friend', 'id'], foo_results)
def create_query_mutation_schema(class_config):
    """
        Creates a schema from defaults or allows overrides of any of these schemas
        Each arg, if overridden, must provide a dict with a query and mutation key, each pointing
        to the override query and mutation graphene.ObjectType
    :param class_config:
    :param class_config.user_group: Handles User and Group queries and mutations (defined in rescape_graphene)
    :param class_config.user_group_state: Handles UserState and GroupState queries and mutations. See the default
    UserState and GroupState for an example
    :param class_config.region: Handles Region queries and mutations. See the default Region for an example
    :param class_config.project: Handles Project queries and mutations. See the default Project for an example
    :param class_config.location: Handles Location queries and mutations. See the default Location for an example
    :return:
    """
    obj = create_query_and_mutation_classes(class_config)
    schema = Schema(query=R.prop('query', obj), mutation=R.prop('mutation', obj))
    return dict(query=R.prop('query', obj), mutation=R.prop('mutation', obj), schema=schema)
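# A minimal sketch of consuming create_query_mutation_schema, assuming an empty class_config
# falls back to the defaults described in the docstring. The view wiring is the standard
# graphene-django pattern rather than anything specific to this repo.
schema_objs = create_query_mutation_schema(dict(
    # Override any of user_group, user_group_state, region, project, location here
))
schema = R.prop('schema', schema_objs)
# e.g. in urls.py: path('graphql', GraphQLView.as_view(schema=schema))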
def mutate(self, info, user_data=None):
    user_model = get_user_model()
    data = R.merge(
        user_data,
        dict(password=make_password(R.prop('password', user_data), salt='not_random'))
        if R.prop_or(False, 'password', user_data) else {}
    )
    update_or_create_values = input_type_parameters_for_update_or_create(user_fields, data)
    user, created = update_or_create_with_revision(user_model, update_or_create_values)
    return UpsertUser(user=user)
def mutate(self, info, group_data=None):
    # Use the Group class itself, matching how the user mutation uses the class returned
    # by get_user_model()
    group_model = Group
    data = R.merge(
        group_data,
        dict(password=make_password(R.prop('password', group_data), salt='not_random'))
        if R.prop_or(False, 'password', group_data) else {}
    )
    update_or_create_values = input_type_parameters_for_update_or_create(group_fields, data)
    group, created = update_or_create_with_revision(group_model, update_or_create_values)
    return UpsertGroup(group=group)
def create_sample_location(cls, location_dict):
    # Save the location with the complete data
    if R.has('key', location_dict):
        # rescape_region uses a key for uniqueness
        return cls.objects.update_or_create(
            key=R.prop('key', location_dict),
            defaults=R.omit(['key'], location_dict)
        )[0]
    else:
        # Other implementors should delete duplicates first
        location = cls(**location_dict)
        location.save()
        return location
def resolve_paginated_for_type(paginated_type, type_resolver, **kwargs):
    """
        Resolver for paginated types
    :param paginated_type: The paginated Type, e.g. LocationPaginationType
    :param type_resolver: The resolver for the non-paginated type, e.g. location_resolver
    :param kwargs: The kwargs contain an array of prop sets in 'objects' for the non-paginated objects.
    Normally it's just a 1-item array. The other required kwargs for pagination are page_size and page,
    and optionally order_by
    :return: The paginated query
    """

    def reduce_or(q_expressions):
        return R.reduce(lambda qs, q: qs | q if qs else q, None, q_expressions)

    objects = R.prop_or({}, 'objects', kwargs)
    instances = reduce_or(
        R.map(lambda obj: type_resolver('filter', **obj), objects)
    )
    return get_paginator(
        instances,
        R.prop('page_size', kwargs),
        R.prop('page', kwargs),
        paginated_type,
        R.prop('order_by', kwargs)
    )
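# A hypothetical resolver built on resolve_paginated_for_type, using the
# LocationPaginationType and location_resolver names from the docstring above.
# The filter value is an illustrative assumption.
def resolve_paginated_locations(self, info, **kwargs):
    return resolve_paginated_for_type(
        LocationPaginationType,
        location_resolver,
        # One prop set per OR-ed filter; page and page_size drive the paginator
        objects=[dict(name__contains='Oslo')],
        page=1,
        page_size=25
    )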
def convert_foreign_key_to_id(scope_obj):
    # Find ForeignKey attributes and map the class field name to the foreign key id field
    # E.g. region to region_id, user to user_id, etc
    converters = R.compose(
        R.from_pairs,
        R.map(lambda field: [field.name, field.attname]),
        R.filter(lambda field: R.isinstance(ForeignKey, field))
    )(model._meta.fields)
    # Convert scope_obj[related_field] = {id: x} to scope_obj[related_field_id] = x
    return R.from_pairs(
        R.map_with_obj_to_values(
            lambda key, value: [converters[key], R.prop('id', value)]
            if R.has(key, converters) else [key, value],
            scope_obj
        )
    )
def increment_prop_until_unique(django_class, strategy, prop, additional_filter_props, django_instance_data):
    """
        Increments the given prop of the given django instance, as given by data[prop], until it matches
        nothing in the database. Note that this includes checks against soft-deleted instances where the
        deleted prop is non-null (assumes the use of SafeDeleteModel on the model class)
    :param django_class: Django class to query
    :param prop: The prop to ensure uniqueness
    :param additional_filter_props: Other props, such as user id, to filter by. This allows incrementing a name
    dependent on the current user, for instance. This can be a dict or a function expecting the
    django_instance_data and returning a dict
    :param strategy: Function to try to make a value unique. Expects all potential matching values--all values
    that begin with the value of the property--the prop value, and the current index. It's called for each
    matching value to guarantee the strategy will eventually get a unique value. For instance, if prop is key
    and it equals 'foo', and 'foo', 'foo1', 'foo2', and 'foo3' are in the db, strategy will be called with an
    array of 4 values 4 times, with index 0 through 3. If strategy is None the default strategy is to append
    index+1 to the duplicate name
    :param django_instance_data: The data containing the prop
    :return: The data merged with the uniquely named prop
    """
    prop_value = R.prop(prop, django_instance_data)
    pk = R.prop_or(None, 'id', django_instance_data)
    strategy = strategy or default_strategy
    # Include deleted objects here. It's up to additional_filter_props to deal with the deleted=date|None property
    all_objects = django_class.all_objects if R.has('all_objects', django_class) else django_class.objects
    matching_values = all_objects.filter(
        # Ignore the value matching the pk if this is an update operation.
        # In other words we can update the key to what it already is, aka do nothing
        *R.compact([
            ~Q(id=pk) if pk else None,
        ]),
        **R.merge(
            {'%s__startswith' % prop: prop_value},
            # Pass the instance to the filter props if they are a function
            R.when(
                lambda f: inspect.isfunction(f),
                lambda f: f(django_instance_data)
            )(additional_filter_props or {})
        )
    ).values_list(prop, flat=True).order_by(prop)
    success = prop_value
    for i, matching_key in enumerate(matching_values):
        success = None
        attempt = strategy(matching_values, prop_value, i)
        if attempt not in matching_values:
            success = attempt
            break
    if not success:
        raise Exception("Could not generate unique prop value %s. The following matching ones exist %s" % (
            prop_value, matching_values))
    return R.merge(django_instance_data, {prop: success})
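# A hypothetical strategy for increment_prop_until_unique, equivalent in spirit to the
# default append-a-number approach described in the docstring.
def append_index_strategy(matching_values, prop_value, index):
    # e.g. 'foo' -> 'foo1' on the first collision, 'foo2' on the second, etc.
    return '%s%s' % (prop_value, index + 1)

# Assuming a Region model keyed by 'key':
# unique_data = increment_prop_until_unique(Region, append_index_strategy, 'key', {}, dict(key='foo'))
# unique_data['key'] is the first of 'foo', 'foo1', 'foo2', ... not already in the Region table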
def mutate(self, info, search_location_data=None):
    deleted_search_location_response = delete_if_marked_for_delete(
        SearchLocation, UpsertSearchLocation, 'search_location', search_location_data)
    if deleted_search_location_response:
        return deleted_search_location_response

    modified_search_location_data = R.compose(
        # Make sure that all props are unique that must be, either by modifying values or erring.
        lambda data: enforce_unique_props(search_location_fields, data),
        # Remove the many-to-many values. They are saved separately
        lambda data: R.omit(['jurisdictions'], data)
    )(search_location_data)

    update_or_create_values = input_type_parameters_for_update_or_create(
        search_location_fields, modified_search_location_data)

    search_location, created = update_or_create_with_revision(SearchLocation, update_or_create_values)

    # SearchJurisdictions can be created during the creation of search_locations
    if R.prop_or(False, 'jurisdictions', search_location_data):
        existing_search_jurisdictions_by_id = R.index_by(R.prop('id'), search_location.jurisdictions.all())
        for search_jurisdiction_unsaved in R.prop('jurisdictions', search_location_data):
            # Existing instances have an id
            search_jurisdiction_id = R.prop_or(None, 'id', search_jurisdiction_unsaved)
            search_jurisdiction, created = update_or_create_with_revision(SearchJurisdiction, R.merge(
                R.prop(search_jurisdiction_id, existing_search_jurisdictions_by_id)
                if search_jurisdiction_id else {},
                search_jurisdiction_unsaved
            ))
            # Once saved, add it to the search location
            search_location.jurisdictions.add(search_jurisdiction)

    return UpsertSearchLocation(search_location=search_location)
def quiz_model_mutation_update(client, graphql_update_or_create_function, create_path, update_path, values,
                               update_values):
    """
        Tests an update mutation for a model by calling a create with the given values, then an update
        with the given update_values (plus the created id)
    :param client: The Apollo Client
    :param graphql_update_or_create_function: The update or create mutation function for the model.
    Expects client and input values
    :param create_path: The path to the result of the create in the data object (e.g. createRegion.region)
    :param update_path: The path to the result of the update in the data object (e.g. updateRegion.region)
    :param values: The input values to use for the create
    :param update_values: The input values to use for the update. This can be as little as one key value
    :return:
    """
    result = graphql_update_or_create_function(client, values=values)
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    # Extract the result and map the graphql keys to match the python keys
    created = R.compose(
        lambda r: R.map_keys(lambda key: underscore(key), r),
        lambda r: R.item_str_path(f'data.{create_path}', r)
    )(result)
    # Everything in values should have been returned by the create
    assert values == pick_deep(created, values)
    # Update with the id, optionally the key if there is one, plus update_values
    update_result = graphql_update_or_create_function(
        client,
        R.merge_all([
            dict(id=created['id']),
            dict(key=created['key']) if R.prop_or(False, 'key', created) else {},
            update_values
        ])
    )
    assert not R.has('errors', update_result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', update_result)))
    updated = R.item_str_path(f'data.{update_path}', update_result)
    assert created['id'] == updated['id']
    assert update_values == pick_deep(update_values, updated)
    return result, update_result
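# A hypothetical call to quiz_model_mutation_update, using the createRegion/updateRegion paths
# from the docstring. The graphql_update_or_create_region helper and values are assumptions;
# the update only needs the changed field, since the helper supplies id and key.
result, update_result = quiz_model_mutation_update(
    self.client,
    graphql_update_or_create_region,
    'createRegion.region',
    'updateRegion.region',
    dict(key='testRegion', name='Test Region'),
    dict(name='Renamed Region')
)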