def create_raw_nodes(resource, delineator=';'):
    """
    Creates nodes for each column from the csv raw_data of the Resource

    :param resource: The Resource object with data.settings.columns and data.raw_data
    :param delineator: Optional value separator within each raw line. Defaults to ';'
        so existing single-argument callers keep their behavior
    :return: Raw node data: one dict per raw line, keyed by the configured column names
    """
    columns = R.item_path(['data', 'settings', 'columns'], resource)
    raw_data = R.item_path(['data', 'raw_data'], resource)
    # Each raw line is 'value1;value2;...' positionally matching the configured columns
    return R.map(lambda line: R.from_pairs(zip(columns, line.split(delineator))), raw_data)
def test_update(self):
    """Creates a user, then updates its name fields and snapshots the updated user."""
    values = dict(username="******", firstName='T', lastName='Rex',
                  password=make_password("rrrrhhh", salt='not_random'))
    # Here is our create
    create_result = graphql_update_or_create_user(self.client, values)
    # Renamed from `id`, which shadowed the builtin of the same name
    user_id = R.prop('id', R.item_path(['data', 'createUser', 'user'], create_result))
    # Here is our update
    result = graphql_update_or_create_user(
        self.client,
        dict(id=user_id, firstName='Al', lastName="Lissaurus")
    )
    assert not R.prop('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    self.assertMatchSnapshot(
        R.omit_deep(omit_props, R.item_path(['data', 'updateUser', 'user'], result)))
def quiz_model_query(client, model_query_function, result_name, variables, expect_length=1):
    """
    Tests a query for a model with variables that produce exactly one result
    :param client: Apollo client
    :param model_query_function: Query function expecting the client and variables
    :param result_name: The name of the result object in the data object
    :param variables: key value variables for the query
    :param expect_length: Default 1. Optional number items to expect
    :return: returns the result for further assertions
    """
    def _assert_no_errors(query_result):
        # Dump formatted graphql errors so a failure is readable
        assert not R.has('errors', query_result), R.dump_json(
            R.map(lambda e: format_error(e), R.prop('errors', query_result)))

    # Run the query unfiltered first to prove it works at all
    unfiltered_result = model_query_function(client)
    _assert_no_errors(unfiltered_result)
    # Now run it with the supplied variables
    filtered_result = model_query_function(client, variables=variables)
    # Check against errors
    _assert_no_errors(filtered_result)
    # Simple assertion that the query looks good
    assert R.length(R.item_path(['data', result_name], filtered_result)) == expect_length
    return filtered_result
def test_create(self):
    """Creates a Resource from sample csv raw data and snapshots the created resource."""
    raw_lines = [
        'Other Global Imports;Shipments, location generalized;51.309933, 3.055030;Source;22,469,843',
        'Knauf (Danilith) BE;Waregemseweg 156-142 9790 Wortegem-Petegem, Belgium;50.864762, 3.479308;Conversion;657,245',
        "MPRO Bruxelles;Avenue du Port 67 1000 Bruxelles, Belgium;50.867486, 4.352543;Distribution;18,632",
        'Residential Buildings (all typologies);Everywhere in Brussels;NA;Demand;3,882,735',
        'Duplex House Typology;Everywhere in Brussels;NA;Demand;13,544',
        'Apartment Building Typology;Everywhere in Brussels;NA;Demand;34,643',
        'New West Gypsum Recycling;9130 Beveren, Sint-Jansweg 9 Haven 1602, Kallo, Belgium;51.270229, 4.261048;Reconversion;87,565',
        'Residential Buildings (all typologies);Everywhere in Brussels;NA;Sink;120,000',
        'RecyPark South;1190 Forest, Belgium;50.810799, 4.314789;Sink;3,130',
        'RecyPark Nord;Rue du Rupel, 1000 Bruxelles, Belgium;50.880181, 4.377136;Sink;1,162'
    ]
    values = dict(
        name='Candy',
        region=dict(id=R.head(self.regions).id),
        data=R.merge(
            sample_settings,
            dict(material='Candy', raw_data=raw_lines)
        )
    )
    result = graphql_update_or_create_resource(self.client, values)
    dump_errors(result)
    assert not R.has('errors', result), R.dump_json(R.prop('errors', result))
    # look at the users added and omit the non-determinant dateJoined
    created_resource = R.item_path(['data', 'createResource', 'resource'], result)
    self.assertMatchSnapshot(R.omit(omit_props, created_resource))
def test_query(self):
    """Queries all resources, then filters by name and expects exactly one match."""
    # Unfiltered query must succeed
    all_result = graphql_query_resources(self.client)
    assert not R.has('errors', all_result), R.dump_json(R.prop('errors', all_result))
    # Filter on the name variable
    results = graphql_query_resources(
        self.client,
        dict(name='String'),
        variable_values=dict(name='Minerals')
    )
    # Check against errors
    assert not R.has('errors', results), R.dump_json(R.prop('errors', results))
    assert R.length(R.item_path(['data', 'resources'], results)) == 1
def test_query_foo_with_null_geojson(self):
    """Queries foos by key and verifies exactly one result comes back."""
    # Query using for foos based on the related User
    foo_results = graphql_query_foos(self.client, variables=dict(key='fookit'))
    errors = R.prop('errors', foo_results)
    assert not errors, R.dump_json(R.map(lambda e: format_error(e), errors))
    foos = R.map(R.omit_deep(omit_props), R.item_path(['data', 'foos'], foo_results))
    assert R.length(foos) == 1
def accumulate_sankey_graph(accumulated_graph, resource):
    """
    Given an accumulated graph and a current Resource object, process the resource
    object and add the results to the accumulated graph
    :param accumulated_graph: dict with optional 'nodes' and 'links' lists built so far
    :param resource: A Resource
    :return: dict(nodes=..., links=...) combining the accumulated graph with this resource's graph
    """
    resource_links = R.item_path(['graph', 'links'], resource.data)
    resource_nodes = R.item_path(['graph', 'nodes'], resource.data)
    # Append this resource's nodes and links to whatever has accumulated so far,
    # defaulting to empty lists on the first resource
    return dict(
        nodes=R.concat(R.prop_or([], 'nodes', accumulated_graph), resource_nodes),
        links=R.concat(R.prop_or([], 'links', accumulated_graph), resource_links)
    )
def test_create_user(self):
    """Creates a user and snapshots the created result."""
    values = dict(
        username="******",
        firstName='T',
        lastName='Rex',
        password=make_password("rrrrhhh", salt='not_random')
    )
    result = graphql_update_or_create_user(self.client, values)
    errors = R.prop('errors', result)
    assert not errors, R.dump_json(R.map(lambda e: format_error(e), errors))
    # look at the users added and omit the non-determinant values
    created_user = R.item_path(['data', 'createUser', 'user'], result)
    self.assertMatchSnapshot(R.omit_deep(omit_props, created_user))
def test_query(self):
    """Queries users, then foos filtered by user and name, and verifies that the
    Django instance stored in the foo's json blob resolves to a real id."""
    user_results = graphql_query_users(self.client)
    # Removed leftover debug call `format_error(R.prop('errors', user_results)[0])`:
    # it unconditionally indexed errors[0] and raised a TypeError whenever the
    # query succeeded (R.prop returns None when there are no errors)
    assert not R.prop('errors', user_results), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', user_results)))
    assert 2 == R.length(R.map(R.omit_deep(omit_props), R.item_path(['data', 'users'], user_results)))

    # Query using for foos based on the related User
    foo_results = graphql_query_foos(
        self.client,
        variables=dict(
            user=R.pick(['id'], self.lion.__dict__),
            # Test filters
            name_contains='oo',
            name_contains_not='jaberwaki'
        )
    )
    assert not R.prop('errors', foo_results), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', foo_results)))
    assert 1 == R.length(R.map(R.omit_deep(omit_props), R.item_path(['data', 'foos'], foo_results)))
    # Make sure the Django instance in the json blob was resolved
    assert self.cat.id == R.item_path(['data', 'foos', 0, 'data', 'friend', 'id'], foo_results)
def create_raw_links(delineator, resource):
    """
    Creates links from the csv if present
    :param delineator: The value separator within each raw link line
    :param resource: The Resource object
    :return: Raw link data keyed by column, or the raw data itself when no explicit links exist
    """
    columns = R.item_path(['data', 'settings', 'columns'], resource)
    raw_data = R.item_path(['data', 'rawData'], resource)
    # Sometimes we split nodes and edges in the raw data into
    # dict(nodes=..., edges=...). Sometimes the raw data is just nodes
    # and we get the edges from the node data
    raw_links = R.prop_or(None, 'links', raw_data)
    if not raw_links:
        return raw_data

    def _line_to_link(line):
        # Pair each configured column with its trimmed value from the delineated line
        cells = [cell.strip() for cell in line.split(delineator)]
        return R.from_pairs(zip(columns, cells))

    return R.map(_line_to_link, raw_links)
def quiz_model_paginated_query(client, model_class, paginated_query, result_name, page_count_expected, props,
                               omit_props, order_by=None, page_size=1):
    """
    Tests a pagination query for a model with variables
    :param client: Apollo client
    :param model_class: Model class
    :param paginated_query: Model's pagination query
    :param page_count_expected: The number of pages expected when the page_size is 1,
    in other words the number of items in the database that match props
    :param result_name: The name of the results in data.[result_name].objects
    :param props: The props to query, not including pagination
    :param omit_props: Props to omit from assertions because they are nondeterminate
    :param order_by: Order by page-level prop. Optional; when None no explicit ordering
    is applied to the database cross-check query
    :param page_size: Default 1
    :return the first result (first page) and final result (last page) for further testing:
    """
    result = paginated_query(
        client,
        variables=dict(page=1, page_size=page_size, order_by=order_by, objects=R.to_array_if_not(props)))
    # Check against errors
    assert not R.has('errors', result), R.dump_json(
        R.map(lambda e: format_error(e), R.prop('errors', result)))
    first_page_objects = R.item_path(['data', result_name, 'objects'], result)
    # Assert we got page_size results on the first page
    assert page_size == R.compose(
        R.length,
        R.map(R.omit(omit_props)),
    )(first_page_objects)

    # Fix: order_by defaults to None but was unconditionally split on ',',
    # which raised AttributeError whenever the default was used
    order_by_args = order_by.split(',') if order_by else []
    # Ids in the database matching props, minus the ids already seen on the first page
    remaining_ids = list(
        set(
            R.map(
                R.prop('id'),
                model_class.objects.filter(*process_filter_kwargs(
                    model_class, **R.map_keys(underscore, props))).order_by(
                    *order_by_args))) -
        set(R.map(R.compose(int, R.prop('id')), first_page_objects)))

    page_info = R.item_path(['data', result_name], result)
    # We have page_size pages so there should be a total number of pages
    # of what we specified for page_count_expected
    assert page_info['pages'] == page_count_expected
    # First page: more pages follow, none precede (truthiness instead of == True/False)
    assert page_info['hasNext']
    assert not page_info['hasPrev']
    # Get the final page
    new_result = paginated_query(
        client,
        variables=dict(page=page_count_expected, page_size=page_info['pageSize'],
                       order_by=order_by, objects=R.to_array_if_not(props)))
    # Make sure the new_result matches one of the remaining ids
    assert R.contains(
        R.item_path(['data', result_name, 'objects', 0, 'id'], new_result),
        remaining_ids)
    new_page_info = R.item_path(['data', result_name], new_result)
    # Still expect the same page count
    assert new_page_info['pages'] == page_count_expected
    # Make sure it's the last page
    assert not new_page_info['hasNext']
    assert new_page_info['hasPrev']
    return [result, new_result]
def generate_sankey_data(resource):
    """
    Generates nodes and links for the given Resource object
    :param resource: Resource object
    :return: A dict containing nodes and links. nodes are a dict key by stage name
    Results can be assigned to resource.data.sankey and saved
    """
    # All sankey configuration lives in resource.data.settings (snake_case keys here)
    settings = R.item_path(['data', 'settings'], resource)
    stages = R.prop('stages', settings)
    stage_key = R.prop('stage_key', settings)
    value_key = R.prop('value_key', settings)
    location_key = R.prop('location_key', settings)
    node_name_key = R.prop('node_name_key', settings)
    default_location = R.prop('default_location', settings)
    # A dct of stages by name
    stage_by_name = stages_by_name(stages)

    def accumulate_nodes(accum, raw_node, i):
        """
        Accumulate each node, keying by the name of the node's stage key
        Since nodes share stage keys these each result is an array of nodes
        :param accum: The dict of node lists accumulated so far, keyed by stage key
        :param raw_node: One raw node dict parsed from the csv
        :param i: The raw node's index, passed to resolve_location
        :return: accum merged with this node appended under its stage key
        """
        # resolve_location falls back to default_location when the node has none
        # NOTE(review): assumes location_obj exposes 'location' and 'is_generalized' — semantics defined in resolve_location
        location_obj = resolve_location(default_location, R.prop(location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('is_generalized', location_obj)
        # The key where then node is stored is the stage key
        key = R.prop('key', stage_by_name[raw_node[stage_key]])
        # Copy all properties from resource.data except settings and raw_data
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and property_values since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'raw_data'], R.prop('data', resource)),
            raw_node)
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key: R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(value=string_to_float(R.prop(value_key, raw_node)),
                             type='Feature',
                             geometry=dict(type='Point', coordinates=location),
                             name=R.prop(node_name_key, raw_node),
                             is_generalized=is_generalized,
                             properties=list(R.keys(properties)),
                             property_values=list(R.values(properties)))
                    ])
            })

    raw_nodes = create_raw_nodes(resource)
    # Reduce the nodes
    nodes_by_stage = R.reduce(
        lambda accum, i_and_node: accumulate_nodes(accum, i_and_node[1], i_and_node[0]),
        {},
        enumerate(raw_nodes))
    # Flatten the per-stage lists into one node list for the sankey
    nodes = R.flatten(R.values(nodes_by_stage))
    # Links are generated from the stages and per-stage nodes by create_links
    return dict(nodes=nodes, nodes_by_stage=nodes_by_stage, links=create_links(stages, value_key, nodes_by_stage))
def test_create_foo(self):
    """Creates a Foo, snapshots it, then creates it again to verify the unique key increments."""
    geojson = {
        'type': 'FeatureCollection',
        'generator': 'Open Street Map',
        'copyright': '2018',
        'features': [
            {
                "type": "Feature",
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [[49.5294835476, 2.51357303225], [51.4750237087, 2.51357303225],
                         [51.4750237087, 6.15665815596], [49.5294835476, 6.15665815596],
                         [49.5294835476, 2.51357303225]]]
                },
            },
            {
                "type": "Feature",
                "id": "node/367331193",
                "properties": {
                    "type": "node",
                    "id": 367331193,
                    "tags": {},
                    "relations": [],
                    "meta": {}
                },
                "geometry": {
                    "type": "Point",
                    "coordinates": [5.7398201, 58.970167]
                }
            }
        ]
    }
    values = dict(
        name='Luxembourg',
        key='luxembourg',
        user=dict(id=self.lion.id),
        data=dict(
            example=1.5,
            friend=dict(id=self.lion.id)  # self love
        ),
        geojson=geojson
    )

    def _created_foo(create_result):
        # Fail with formatted graphql errors, otherwise extract the created foo
        errors = R.prop('errors', create_result)
        assert not errors, R.dump_json(R.map(lambda e: format_error(e), errors))
        return R.item_path(['data', 'createFoo', 'foo'], create_result)

    created = _created_foo(graphql_update_or_create_foo(self.client, values))
    # look at the Foo added and omit the non-determinant dateJoined
    self.assertMatchSnapshot(R.omit_deep(omit_props, created))
    # Try creating the same Foo again, because of the unique constraint on key and the unique_with property
    # on its field definition value, it will increment to luxembourg1
    created_too = _created_foo(graphql_update_or_create_foo(self.client, values))
    assert created['id'] != created_too['id']
    assert created_too['key'].startswith('luxembourg') and created_too['key'] != 'luxembourg'
def generate_sankey_data(resource):
    """
    Generates nodes and links for the given Resource object
    :param resource: Resource object
    :return: A dict containing nodes and links. nodes are a dict key by stage name
    Results can be assigned to resource.data.sankey and saved
    """
    # All sankey configuration lives in resource.data.settings (camelCase keys)
    settings = R.item_path(['data', 'settings'], resource)
    stages = R.prop('stages', settings)
    stage_key = R.prop('stageKey', settings)
    value_key = R.prop('valueKey', settings)
    location_key = R.prop('locationKey', settings)
    node_name_key = R.prop('nodeNameKey', settings)
    node_color_key = R.prop_or(None, 'nodeColorKey', settings)  # NOTE(review): read but not yet used below
    default_location = R.prop('defaultLocation', settings)
    delineator = R.prop_or(';', 'delineator', settings)
    # A dct of stages by name
    stage_by_name = stages_by_name(stages)
    # Optional explicit-link settings: when linkStartNodeKey is configured the raw
    # data carries explicit links; otherwise links are derived from nodes and stages
    link_start_node_key = R.prop_or(None, 'linkStartNodeKey', settings)
    link_end_node_key = R.prop_or(None, 'linkEndNodeKey', settings)
    link_value_key = R.prop_or(None, 'linkValueKey', settings)
    link_color_key = R.prop_or(None, 'linkColorKey', settings)

    def accumulate_nodes(accum, raw_node, i):
        """
        Accumulate each node, keying by the name of the node's stage key
        Since nodes share stage keys these each result is an array of nodes
        :param accum: The dict of node lists accumulated so far, keyed by stage key
        :param raw_node: One raw node dict parsed from the csv
        :param i: The raw node's index, passed to resolve_coordinates
        :return: accum merged with this node appended under its stage key
        """
        location_obj = resolve_coordinates(default_location, R.prop_or(None, location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('isGeneralized', location_obj)
        # The key where then node is stored is the stage key
        node_stage = raw_node[stage_key]
        # Get key from name or it's already a key
        key = R.prop('key', R.prop_or(dict(key=node_stage), node_stage, stage_by_name))
        # Copy all properties from resource.data except settings and rawData
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and propertyValues since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'rawData'], R.prop('data', resource)),
            raw_node
        )
        properties[node_name_key] = humanize(properties[node_name_key])
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key: R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(
                            value=string_to_float(R.prop(value_key, raw_node)),
                            type='Feature',
                            geometry=dict(
                                type='Point',
                                coordinates=location
                            ),
                            name=R.prop(node_name_key, raw_node),
                            isGeneralized=is_generalized,
                            properties=list(R.keys(properties)),
                            propertyValues=list(R.values(properties))
                        )
                    ]
                )
            }
        )

    raw_nodes = create_raw_nodes(delineator, resource)
    # Reduce the nodes
    nodes_by_stage = R.reduce(
        lambda accum, i_and_node: accumulate_nodes(accum, i_and_node[1], i_and_node[0]),
        {},
        enumerate(raw_nodes)
    )
    nodes = R.flatten(R.values(nodes_by_stage))
    # See if there are explicit links.
    # Fix: this previously tested the snake_case path
    # ['data', 'settings', 'link_start_node_key'], which can never match the camelCase
    # 'linkStartNodeKey' setting read above, so explicit links were silently ignored
    if link_start_node_key:
        raw_links = create_raw_links(delineator, resource)
        # Index nodes by their name so links can reference them
        # (node_name_key replaces the redundant node_key_key, which re-read 'nodeNameKey')
        nodes_by_key = R.from_pairs(R.map(
            lambda node: [prop_lookup(node, node_name_key), node],
            nodes
        ))
        links = R.map(
            lambda link: dict(
                source_node=nodes_by_key[link[link_start_node_key]],
                target_node=nodes_by_key[link[link_end_node_key]],
                value=link[link_value_key],
                color=R.prop_or(None, link_color_key, link)
            ),
            raw_links
        )
    else:
        # Guess links from nodes and stages
        links = create_links(stages, value_key, nodes_by_stage)
    return dict(
        nodes=nodes,
        nodes_by_stage=nodes_by_stage,
        # We might have explicit links or have to generate all possible based on the nodes
        links=links
    )