Example #1
def dump_graphql_keys(dct):
    """
        Convert a dict to graphql input parameter keys in the form shown below.
        Also camelizes keys if they are slugs and handles complex types. If a value has read=IGNORE or read=DENY it is omitted
        key1
        key2
        key3
        key4 {
            subkey1
            ...
        }
        ...
    :param dct: dict keyed by field
    :return: A newline-separated string of graphql keys
    """
    from rescape_graphene.graphql_helpers.schema_helpers import IGNORE, DENY
    return R.join('\n', R.values(R.map_with_obj(
        dump_graphene_type,
        R.filter_dict(
            lambda key_value: not R.compose(
                lambda v: R.contains(v, [IGNORE, DENY]),
                lambda v: R.prop_or(None, 'read', v)
            )(key_value[1]),
            dct
        )
    )))
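
# A minimal standalone sketch of the filtering step above, using plain dicts in
# place of the ramda-style helpers. IGNORE and DENY here are stand-ins for the
# constants imported from rescape_graphene, and the field-config shape is assumed.
IGNORE, DENY = 'ignore', 'deny'
fields = dict(
    id=dict(),
    name=dict(),
    internal=dict(read=IGNORE),  # filtered out of the dump
)
readable = {k: v for k, v in fields.items() if v.get('read') not in (IGNORE, DENY)}
print('\n'.join(readable.keys()))
# id
# name
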
def pick_selections(selections, data):
    """
        Pick the selections from the current data
    :param {[String]} selections: The field names that are in the query
    :param {dict} data: Data to pick from
    :return: {DataTuple} data limited to the selections
    """
    dct = R.pick(selections, data)
    return namedtuple('DataTuple', R.keys(dct))(*R.values(dct))
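
# Hedged usage sketch for pick_selections, assuming R.pick returns the subset of
# data whose keys appear in selections (ramda-style behavior). The sample data is hypothetical.
data = dict(id=1, name='Amsterdam', geojson=dict(type='FeatureCollection'))
result = pick_selections(['id', 'name'], data)
assert result.id == 1 and result.name == 'Amsterdam'  # a DataTuple limited to the selections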
Example #3
    def accumulate_nodes(accum, raw_node, i):
        """
            Accumulate each node, keying by the node's stage key
            Since nodes can share stage keys, each result is an array of nodes
        :param accum:
        :param raw_node:
        :param i:
        :return:
        """
        location_obj = resolve_coordinates(default_location, R.prop_or(None, location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('isGeneralized', location_obj)
        # The key where the node is stored is the stage key
        node_stage = raw_node[stage_key]
        # Get the key from the stage name, or use the value as-is if it's already a key
        key = R.prop('key', R.prop_or(dict(key=node_stage), node_stage, stage_by_name))

        # Copy all properties from resource.data except settings and rawData
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and propertyValues since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'rawData'], R.prop('data', resource)),
            raw_node
        )
        properties[node_name_key] = humanize(properties[node_name_key])
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key: R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(
                            value=string_to_float(R.prop(value_key, raw_node)),
                            type='Feature',
                            geometry=dict(
                                type='Point',
                                coordinates=location
                            ),
                            name=R.prop(node_name_key, raw_node),
                            isGeneralized=is_generalized,
                            properties=list(R.keys(properties)),
                            propertyValues=list(R.values(properties))
                        )
                    ]
                )
            }
        )
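
# Plain-Python sketch of the stage-key lookup above: use the stage entry's key
# when the raw value is a stage name, otherwise treat the value as already being
# a key. stage_by_name and the sample values are assumptions for illustration.
stage_by_name = dict(Extraction=dict(key='extraction', name='Extraction'))

def stage_key_for(node_stage):
    return stage_by_name.get(node_stage, dict(key=node_stage))['key']

stage_key_for('Extraction')  # 'extraction' (looked up by stage name)
stage_key_for('processing')  # 'processing' (already a key)
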
def resolver_for_feature_collection(resource, context, **kwargs):
    """
        Like resolver but takes care of converting the geos value stored in the field to a dict that
        has the values we want to resolve, namely type and features.
    :param {string} resource: The instance whose json field data is being resolved
    :param {ResolveInfo} context: Graphene context which contains the fields queried in field_asts
    :return: {DataTuple} Standard resolver return value
    """

    # Take the camelized keys. We don't store data fields slugified. We leave them camelized
    selections = R.map(lambda sel: sel.name.value, context.field_asts[0].selection_set.selections)
    # Recover the json by parsing the string provided by GeometryCollection and mapping the geometries property to features
    json = R.compose(
        # Map the value GeometryCollection to FeatureCollection for the type property
        R.map_with_obj(lambda k, v: R.if_else(
            R.equals('type'),
            R.always('FeatureCollection'),
            R.always(v)
        )(k)),
        # Map geometries to features: [{type: Feature, geometry: geometry}]
        lambda dct: R.merge(
            # Remove geometries
            R.omit(['geometries'], dct),
            # Add features containing the geometries
            dict(features=R.map(
                lambda geometry: dict(type='Feature', geometry=geometry),
                R.prop_or([], 'geometries', dct))
            )
        ),
    )(ast.literal_eval(R.prop(context.field_name, resource).json))
    # Identify the keys that are actually in resource[json_field_name]
    all_selections = R.filter(
        lambda key: key in json,
        selections
    )
    # Pick out the values that we want
    result = R.pick(all_selections, json)

    # Return in the standard Graphene DataTuple
    return namedtuple('DataTuple', R.keys(result))(*R.values(result))
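
# Plain-Python sketch of the GeometryCollection -> FeatureCollection conversion
# performed above. The input shape mirrors what ast.literal_eval of the GEOS
# field's .json string is assumed to produce.
geometry_collection = dict(
    type='GeometryCollection',
    geometries=[dict(type='Point', coordinates=[4.9, 52.37])]
)
feature_collection = dict(
    type='FeatureCollection',
    features=[dict(type='Feature', geometry=geometry)
              for geometry in geometry_collection['geometries']]
)
# {'type': 'FeatureCollection', 'features': [{'type': 'Feature', 'geometry': {...}}]}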
Example #5
        def mutate(self, info, user_state_data=None):
            """
                Update or create the user state
            :param info:
            :param user_state_data:
            :return:
            """

            # Check that all the scope instances in user_state.data exist. We permit deleted instances for now.
            new_data = R.prop_or({}, 'data', user_state_data)
            # Copy since Graphene reuses this data
            copied_new_data = copy.deepcopy(new_data)
            old_user_state_data = UserState.objects.get(
                id=user_state_data['id']
            ).data if R.prop_or(None, 'id', user_state_data) else None

            # Inspect the data and find all scope instances within UserState.data
            # This includes userRegions[*].region, userProjects[*].project and, within userRegions and userProjects,
            # userSearch.userSearchLocations[*].search_location, plus whatever else the implementing libraries
            # define in addition
            updated_new_data = validate_and_mutate_scope_instances(
                user_state_scope_instances_config,
                copied_new_data
            )

            # If either userProjects or userRegions are null, it means those scope instances aren't part
            # of the update, so merge in the old values
            if R.prop_or(None, 'id', user_state_data) and R.any_satisfy(
                    lambda user_scope_key: not R.prop_or(None, user_scope_key, updated_new_data),
                    ['userProjects', 'userRegions']
            ):
                # The special update case where one userScope collection is null,
                # indicates that we are only updating one userScope object. The rest
                # should remain the same and not be removed
                for user_scope_key in ['userProjects', 'userRegions']:
                    # Database values
                    old_user_scopes_by_id = user_scope_instances_by_id(
                        user_scope_key,
                        old_user_state_data
                    )
                    # New values with updates applied
                    new_user_scopes_by_id = user_scope_instances_by_id(
                        user_scope_key,
                        updated_new_data
                    )
                    # Prefer the old over the new, merging all objects but overriding lists
                    # We override lists because a non-null list always replaces the old list in the database
                    updated_new_data[user_scope_key] = R.values(R.merge_deep(
                        old_user_scopes_by_id,
                        new_user_scopes_by_id,
                        MyMerger(
                            # pass in a list of tuples, with the
                            # strategies you are looking to apply
                            # to each type.
                            [
                                (list, ["override_non_null"]),
                                (dict, ["merge"])
                            ],
                            # next, choose the fallback strategies,
                            # applied to all other types:
                            ["override"],
                            # finally, choose the strategies in
                            # the case where the types conflict:
                            ["override"]
                        )
                    ))

            # Update user_state_data with the updated data
            modified_user_state_data = R.merge(user_state_data, dict(data=updated_new_data))

            # id or user.id can be used to identify the existing instance
            id_props = R.compact_dict(
                dict(
                    id=R.prop_or(None, 'id', modified_user_state_data),
                    user_id=R.item_str_path_or(None, 'user.id', modified_user_state_data)
                )
            )

            def fetch_and_merge(modified_user_state_data, props):
                existing = UserState.objects.filter(**props)
                # If the user doesn't have a user state yet
                if not R.length(existing):
                    return modified_user_state_data

                return merge_data_fields_on_update(
                    ['data'],
                    R.head(existing),
                    # Merge existing's id in case it wasn't in user_state_data
                    R.merge(modified_user_state_data, R.pick(['id'], existing))
                )

            modified_data = R.if_else(
                R.compose(R.length, R.keys),
                lambda props: fetch_and_merge(modified_user_state_data, props),
                lambda _: modified_user_state_data
            )(id_props)

            update_or_create_values = input_type_parameters_for_update_or_create(
                user_state_fields,
                # Make sure that all props that must be unique are, either by modifying values or erring.
                enforce_unique_props(
                    user_state_fields,
                    modified_data)
            )

            user_state, created = update_or_create_with_revision(UserState, update_or_create_values)
            return UpsertUserState(user_state=user_state)
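
# Sketch of the id_props compaction above, assuming R.compact_dict drops None
# values and R.item_str_path_or walks a dotted path with a default. Sample data is hypothetical.
sample_user_state_data = dict(user=dict(id=7), data=dict())
id_props = {k: v for k, v in dict(
    id=sample_user_state_data.get('id'),
    user_id=(sample_user_state_data.get('user') or {}).get('id')
).items() if v is not None}
# id_props == {'user_id': 7}, so the existing UserState is looked up by user_id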
Example #6
def generate_sankey_data(resource):
    """
        Generates nodes and links for the given Resource object
    :param resource:  Resource object
    :return: A dict containing nodes, nodes_by_stage (nodes keyed by stage name), and links.
        Results can be assigned to resource.data.sankey and saved
    """

    settings = R.item_path(['data', 'settings'], resource)
    stages = R.prop('stages', settings)
    stage_key = R.prop('stage_key', settings)
    value_key = R.prop('value_key', settings)
    location_key = R.prop('location_key', settings)
    node_name_key = R.prop('node_name_key', settings)
    default_location = R.prop('default_location', settings)
    # A dct of stages by name
    stage_by_name = stages_by_name(stages)

    def accumulate_nodes(accum, raw_node, i):
        """
            Accumulate each node, keying by the node's stage key
            Since nodes can share stage keys, each result is an array of nodes
        :param accum:
        :param raw_node:
        :param i:
        :return:
        """
        location_obj = resolve_location(default_location,
                                        R.prop(location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('is_generalized', location_obj)
        # The key where the node is stored is the stage key
        key = R.prop('key', stage_by_name[raw_node[stage_key]])

        # Copy all properties from resource.data except settings and raw_data
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and property_values since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'raw_data'], R.prop('data', resource)),
            raw_node)
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key:
                R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(value=string_to_float(R.prop(value_key,
                                                          raw_node)),
                             type='Feature',
                             geometry=dict(type='Point', coordinates=location),
                             name=R.prop(node_name_key, raw_node),
                             is_generalized=is_generalized,
                             properties=list(R.keys(properties)),
                             property_values=list(R.values(properties)))
                    ])
            })

    raw_nodes = create_raw_nodes(resource)
    # Reduce the nodes
    nodes_by_stage = R.reduce(
        lambda accum, i_and_node: accumulate_nodes(accum, i_and_node[1],
                                                   i_and_node[0]), {},
        enumerate(raw_nodes))
    nodes = R.flatten(R.values(nodes_by_stage))
    return dict(nodes=nodes,
                nodes_by_stage=nodes_by_stage,
                links=create_links(stages, value_key, nodes_by_stage))
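
# Minimal sketch of the reduce-over-enumerate pattern used above, grouping nodes
# under a shared stage key the way accumulate_nodes does. The sample nodes and
# keys are assumptions for illustration.
from functools import reduce

sample_raw_nodes = [dict(stage='Extraction', value='1.5'),
                    dict(stage='Processing', value='2.0'),
                    dict(stage='Extraction', value='0.5')]

def accumulate(accum, i_and_node):
    i, node = i_and_node
    key = node['stage']
    return {**accum, key: accum.get(key, []) + [dict(index=i, value=float(node['value']))]}

sample_nodes_by_stage = reduce(accumulate, enumerate(sample_raw_nodes), {})
# {'Extraction': [<node 0>, <node 2>], 'Processing': [<node 1>]}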
Example #7
def generate_sankey_data(resource):
    """
        Generates nodes and links for the given Resource object
    :param resource:  Resource object
    :return: A dict containing nodes, nodes_by_stage (nodes keyed by stage name), and links.
        Results can be assigned to resource.data.sankey and saved
    """

    settings = R.item_path(['data', 'settings'], resource)
    stages = R.prop('stages', settings)
    stage_key = R.prop('stageKey', settings)
    value_key = R.prop('valueKey', settings)
    location_key = R.prop('locationKey', settings)
    node_name_key = R.prop('nodeNameKey', settings)
    node_color_key = R.prop_or(None, 'nodeColorKey', settings)
    default_location = R.prop('defaultLocation', settings)
    delineator = R.prop_or(';', 'delineator', settings)
    # A dct of stages by name
    stage_by_name = stages_by_name(stages)

    link_start_node_key = R.prop_or(None, 'linkStartNodeKey', settings)
    link_end_node_key = R.prop_or(None, 'linkEndNodeKey', settings)
    link_value_key = R.prop_or(None, 'linkValueKey', settings)
    link_color_key = R.prop_or(None, 'linkColorKey', settings)

    def accumulate_nodes(accum, raw_node, i):
        """
            Accumulate each node, keying by the node's stage key
            Since nodes can share stage keys, each result is an array of nodes
        :param accum:
        :param raw_node:
        :param i:
        :return:
        """
        location_obj = resolve_coordinates(default_location, R.prop_or(None, location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('isGeneralized', location_obj)
        # The key where the node is stored is the stage key
        node_stage = raw_node[stage_key]
        # Get the key from the stage name, or use the value as-is if it's already a key
        key = R.prop('key', R.prop_or(dict(key=node_stage), node_stage, stage_by_name))

        # Copy all properties from resource.data except settings and rawData
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and propertyValues since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'rawData'], R.prop('data', resource)),
            raw_node
        )
        properties[node_name_key] = humanize(properties[node_name_key])
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key: R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(
                            value=string_to_float(R.prop(value_key, raw_node)),
                            type='Feature',
                            geometry=dict(
                                type='Point',
                                coordinates=location
                            ),
                            name=R.prop(node_name_key, raw_node),
                            isGeneralized=is_generalized,
                            properties=list(R.keys(properties)),
                            propertyValues=list(R.values(properties))
                        )
                    ]
                )
            }
        )

    raw_nodes = create_raw_nodes(delineator, resource)

    # Reduce the nodes
    nodes_by_stage = R.reduce(
        lambda accum, i_and_node: accumulate_nodes(accum, i_and_node[1], i_and_node[0]),
        {},
        enumerate(raw_nodes)
    )
    nodes = R.flatten(R.values(nodes_by_stage))
    # See if there are explicit links
    if R.item_path_or(False, ['data', 'settings', 'linkStartNodeKey'], resource):
        raw_links = create_raw_links(delineator, resource)
        node_key_key = R.prop('nodeNameKey', settings)
        nodes_by_key = R.from_pairs(R.map(
            lambda node: [prop_lookup(node, node_key_key), node],
            nodes
        ))
        links = R.map(
            lambda link: dict(
                source_node=nodes_by_key[link[link_start_node_key]],
                target_node=nodes_by_key[link[link_end_node_key]],
                value=link[link_value_key],
                color=R.prop_or(None, link_color_key, link)
            ),
            raw_links
        )
    else:
        # Guess links from nodes and stages
        links = create_links(stages, value_key, nodes_by_stage)
    return dict(
        nodes=nodes,
        nodes_by_stage=nodes_by_stage,
        # We might have explicit links or have to generate all possible links based on the nodes
        links=links
    )
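
# Sketch of the explicit-link resolution above, with plain dicts standing in for
# the generated nodes and the configured link*Key settings. Names and sample
# values are assumptions for illustration.
sample_nodes = [dict(name='Mine A'), dict(name='Port B')]
sample_nodes_by_key = {node['name']: node for node in sample_nodes}
sample_raw_links = [dict(start='Mine A', end='Port B', value=12.5)]
sample_links = [
    dict(source_node=sample_nodes_by_key[link['start']],
         target_node=sample_nodes_by_key[link['end']],
         value=link['value'],
         color=link.get('color'))
    for link in sample_raw_links
]
# Each link references the full source and target node dicts, looked up by name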