def pick_selections(selections, data):
    """
        Pick the selections from the current data
    :param {[Sting]} selections: The field names to that are in the query
    :param {dict} data: Data to pick from
    :return: {DataTuple} data with limited to selections
    """
    dct = R.pick(selections, data)
    return namedtuple('DataTuple', R.keys(dct))(*R.values(dct))
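# A minimal usage sketch (hypothetical field names and data), assuming R.pick behaves like Ramda's pick,
# keeping only the requested keys that are present in the data:
#   data_tuple = pick_selections(['id', 'name'], dict(id=1, name='Oakland', geojson={}))
#   data_tuple.id    # 1
#   data_tuple.name  # 'Oakland'
#   # 'geojson' is dropped from the DataTuple because it was not selected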
    def accumulate_nodes(accum, raw_node, i):
        """
            Accumulate each node, keying by the name of the node's stage key
            Since nodes share stage keys these each result is an array of nodes
        :param accum:
        :param raw_node:
        :param i:
        :return:
        """
        location_obj = resolve_coordinates(default_location, R.prop_or(None, location_key, raw_node), i)
        location = R.prop('location', location_obj)
        is_generalized = R.prop('isGeneralized', location_obj)
        # The key where the node is stored is the stage key
        node_stage = raw_node[stage_key]
        # Get the key from the name, or it's already a key
        key = R.prop('key', R.prop_or(dict(key=node_stage), node_stage, stage_by_name))

        # Copy all properties from resource.data except settings and rawData
        # Also grab raw_node properties
        # This is for arbitrary properties defined in the data
        # We put them in properties and propertyValues since graphql hates arbitrary key/values
        properties = R.merge(
            R.omit(['settings', 'rawData'], R.prop('data', resource)),
            raw_node
        )
        properties[node_name_key] = humanize(properties[node_name_key])
        return R.merge(
            # Omit accum[key] since we'll concat it with the new node
            R.omit([key], accum),
            {
                # concat accum[key] or [] with the new node
                key: R.concat(
                    R.prop_or([], key, accum),
                    # Note that the value is an array so we can combine nodes with the same stage key
                    [
                        dict(
                            value=string_to_float(R.prop(value_key, raw_node)),
                            type='Feature',
                            geometry=dict(
                                type='Point',
                                coordinates=location
                            ),
                            name=R.prop(node_name_key, raw_node),
                            isGeneralized=is_generalized,
                            properties=list(R.keys(properties)),
                            propertyValues=list(R.values(properties))
                        )
                    ]
                )
            }
        )
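    # A sketch of the accumulated result shape (hypothetical stage key), following the merge/concat above:
    #   accum == {
    #       'someStageKey': [
    #           dict(value=1.2, type='Feature',
    #                geometry=dict(type='Point', coordinates=location),
    #                name='Some Node', isGeneralized=False,
    #                properties=[...], propertyValues=[...]),
    #           # ...more nodes sharing this stage key
    #       ]
    #   }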
def find_scope_instances(user_state_scope, new_data):
    """
        Retrieve the scope instances to verify the Ids.
        Scope instances must have ids unless they are allowed to be created/updated
        during the userState mutation (such as searchLocations)
    :param new_data: The data to search
    :param user_state_scope: Dict with 'pick' in the shape of the instances we are looking for in new_data,
    e.g. dict(userRegions={region: True}) to search new_data.userRegions[] for all occurrences of {region:...}
     and 'key' which indicates the actual key of the instance (e.g. 'region' for regions)
    :return: dict(
        instances=Instances actually in the database,
        user_scope_path=the path from userRegions or userProjects to the scope instances,
        scope_objs=the matching objects found in new_data,
        scope_ids=the unique ids of those objects
    )
    """
    def until(key, value):
        return key != R.prop('key', user_state_scope)

    return R.compose(
        lambda scope_dict: dict(
            # See which instances with ids are actually in the database
            # If any are missing we have an invalid update or need to create those instances if permitted
            instances=list(
                find_scope_instances_by_id(R.prop('model', user_state_scope),
                                           scope_dict['scope_ids'])),
            # The path from userRegions or userProjects to the scope instances, used to replace
            # a null update value with the existing values
            user_scope_path=list(R.keys(R.flatten_dct(user_state_scope, '.')))[
                0],
            **scope_dict),
        lambda scope_objs: dict(
            # Unique by id or accept if there is no id, this loses data, but it's just for validation
            scope_objs=R.unique_by(
                lambda obj: R.prop_or(str(now()), 'id', obj['value']),
                scope_objs),
            scope_ids=R.unique_by(
                R.identity,
                compact(
                    R.map(
                        lambda scope_obj: R.prop_or(None, 'id', scope_obj[
                            'value']), scope_objs)))),
        # Use the pick key property to find the scope instances in the data
        # If we don't match anything we can get null or an empty item. Filter/compact these out
        R.filter(lambda obj: obj['value'] and (not isinstance(
            obj['value'], list) or R.length(obj['value']) != 0)),
        R.map(lambda pair: dict(key=pair[0], value=pair[1])),
        lambda flattened_data: R.to_pairs(flattened_data),
        lambda data: R.flatten_dct_until(
            R.pick_deep_all_array_items(R.prop('pick', user_state_scope), data
                                        ), until, '.'))(new_data)
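# A minimal usage sketch (hypothetical region ids), assuming Region instances with ids 1 and 2 exist:
#   scope = dict(pick=dict(userRegions=dict(region=True)), key='region', model=get_region_model())
#   result = find_scope_instances(scope, dict(userRegions=[dict(region=dict(id=1)), dict(region=dict(id=2))]))
#   # result['scope_ids'] == [1, 2]
#   # result['instances'] contains the matching Region instances found in the database
#   # result['scope_objs'] pairs each flattened key path with its {'id': ...} value for later validation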
def resolver_for_feature_collection(resource, context, **kwargs):
    """
        Like resolver but takes care of converting the geos value stored in the field to a dict that
        has the values we want to resolve, namely type and features.
    :param resource: The Django model instance whose json field data is being resolved
    :param {ResolveInfo} context: Graphene context which contains the fields queried in field_asts
    :return: {DataTuple} Standard resolver return value
    """

    # Take the camelized keys. We don't store data fields slugified. We leave them camelized
    selections = R.map(lambda sel: sel.name.value, context.field_asts[0].selection_set.selections)
    # Recover the json by parsing the string provided by GeometryCollection and mapping the geometries property to features
    json = R.compose(
        # Map the value GeometryCollection to FeatureCollection for the type property
        R.map_with_obj(lambda k, v: R.if_else(
            R.equals('type'),
            R.always('FeatureCollection'),
            R.always(v)
        )(k)),
        # Map geometries to features: [{type: Feature, geometry: geometry}]
        lambda dct: R.merge(
            # Remove geometries
            R.omit(['geometries'], dct),
            # Add features containing the geometries
            dict(features=R.map(
                lambda geometry: dict(type='Feature', geometry=geometry),
                R.prop_or([], 'geometries', dct))
            )
        ),
    )(ast.literal_eval(R.prop(context.field_name, resource).json))
    # Identify the keys that are actually in resource[json_field_name]
    all_selections = R.filter(
        lambda key: key in json,
        selections
    )
    # Pick out the values that we want
    result = R.pick(all_selections, json)

    # Return in the standard Graphene DataTuple
    return namedtuple('DataTuple', R.keys(result))(*R.values(result))
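# A sketch of the conversion performed above (hypothetical geometry), following the R.compose pipeline:
#   stored GEOS value:  {'type': 'GeometryCollection',
#                        'geometries': [{'type': 'Point', 'coordinates': [0, 0]}]}
#   resolved json:      {'type': 'FeatureCollection',
#                        'features': [{'type': 'Feature',
#                                      'geometry': {'type': 'Point', 'coordinates': [0, 0]}}]}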
def create_user_state_config(class_config):
    """
        Creates the UserStateType based on specific class_config
    :param class_config: A dict containing class configurations. The default is:
    dict(
        settings=dict(
            model_class=Settings,
            graphene_class=SettingsType,
            graphene_fields=settings_fields,
            query=SettingsQuery,
            mutation=SettingsMutation
        ),
        region=dict(
            model_class=Region,
            graphene_class=RegionType,
            graphene_fields=region_fields,
            query=RegionQuery,
            mutation=RegionMutation
        ),
        project=dict(
            model_class=Project,
            graphene_class=ProjectType,
            graphene_fields=project_fields,
            query=ProjectQuery,
            mutation=ProjectMutation
        ),
        resource=dict(
            model_class=Resource,
            graphene_class=ResourceType,
            graphene_fields=resource_fields,
            query=ResourceQuery,
            mutation=ResourceMutation
        ),
        location=get_location_schema(),
        user_search=get_user_search_data_schema(),
        search_location=get_search_location_schema()
        # additional_user_scope_schemas and additional_django_model_user_scopes
        # are passed in from a calling app
        # these are a dict of properties that need to go on user_regions and user_projects
        # at the same level as userSearch. For instance, a user's saved app selections could go here
        # additional_user_scope_schemas = dict(
        #     userDesignFeatureLayers=dict(
        #         graphene_class=UserDesignFeatureDataType,
        #         graphene_fields=user_design_feature_data_fields
        #     )
        # )
        # additional_django_model_user_scopes explains the path to Django models within additional_user_scope_schemas
        # additional_django_model_user_scopes = dict(
        #     userDesignFeatureLayers=dict(
        #         designFeature=True
        #     )
        # )
        # Would match the list of some django DesignFeature model instances
    )
    :return: A dict with the model_class, graphene_class, graphene_fields, create/update mutation classes,
    and graphql query/mutation helpers for UserState
    """

    class UserStateType(DjangoObjectType, DjangoObjectTypeRevisionedMixin):
        """
            UserStateType models UserState, which represents the settings both imposed upon and chosen by the user
        """
        id = graphene.Int(source='pk')

        class Meta:
            model = UserState

    # Modify data field to use the resolver.
    # I guess there's no way to specify a resolver upon field creation, since graphene just reads the underlying
    # Django model to generate the fields
    UserStateType._meta.fields['data'] = Field(
        UserStateDataType(class_config),
        resolver=resolver_for_data_field
    )

    user_state_fields = merge_with_django_properties(UserStateType, dict(
        id=dict(create=DENY, update=REQUIRE),
        # This is a Foreign Key. Graphene generates these relationships for us, but we need it here to
        # support our Mutation subclasses and query_argument generation
        # For simplicity we limit fields to id. Mutations can only use id, and a query doesn't need other
        # details of the User--it can query separately for that
        user=dict(graphene_type=UserType, fields=user_fields),
        # This refers to the UserStateDataType, which is a representation of all the json fields of UserState.data
        data=dict(graphene_type=UserStateDataType(class_config), fields=user_state_data_fields(class_config),
                  default=lambda: dict()),
        **reversion_and_safe_delete_types
    ))

    user_state_mutation_config = dict(
        class_name='UserState',
        crud={
            CREATE: 'createUserState',
            UPDATE: 'updateUserState'
        },
        resolve=guess_update_or_create
    )

    additional_django_model_user_scopes = R.prop('additional_django_model_user_scopes', class_config) \
        if R.prop_or(None, 'additional_django_model_user_scopes', class_config) else {}
    additional_user_scope_schemas = R.prop('additional_user_scope_schemas', class_config) \
        if R.prop_or(None, 'additional_user_scope_schemas', class_config) else {}

    # The scope instance types expected in user_state.data
    user_state_scope_instances_config = R.concat([
        # dict(region=True) means search all userRegions for that dict
        dict(pick=dict(userRegions=dict(region=True)),
             key='region',
             model=get_region_model()
             ),
        # dict(project=True) means search all userProjects for that dict
        dict(
            pick=dict(userProjects=dict(project=True)),
            key='project',
            model=get_project_model(),
            # This is currently just needed for the field key's unique_with function
            field_config=project_fields,
            # Projects can be modified when userState is mutated
            can_mutate_related=True
        ),
        dict(
            pick=dict(
                userRegions=[
                    dict(
                        userSearch=dict(
                            # dict(searchLocation=True) means search all userSearchLocations for that dict
                            userSearchLocations=dict(
                                searchLocation=True,
                            )
                        )
                    )
                ],
                userProjects=[
                    dict(
                        userSearch=dict(
                            # dict(searchLocation=True) means search all userSearchLocations for that dict
                            userSearchLocations=dict(searchLocation=True)
                        )
                    )
                ]
            ),
            key='searchLocation',
            model=get_search_location_schema()['model_class'],
            # These can be modified when userState is mutated
            can_mutate_related=True
        ),
    ],
        # Map each additional_django_model_user_scopes to a scope config
        R.map_with_obj_to_values(
            lambda field_name, additional_django_model_user_scope: dict(
                pick=dict(
                    userRegions=[
                        {field_name: additional_django_model_user_scope}
                    ],
                    userProjects=[
                        {field_name: additional_django_model_user_scope}
                    ]
                ),
                # Assume the scope object is the deepest field
                key=list(R.keys(R.flatten_dct(additional_django_model_user_scope, '.')))[0].split('.')[-1],
                # model isn't needed unless can_mutate_related is true
                model=additional_user_scope_schemas[field_name]['model'],
                # These can be modified when userState is mutated
                can_mutate_related=R.prop_or(False, 'can_mutate_related', additional_django_model_user_scope)
            ),
            additional_django_model_user_scopes
        )
    )
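    # A sketch of the mapping above, using the hypothetical userDesignFeatureLayers example from the docstring:
    #   additional_django_model_user_scopes = dict(userDesignFeatureLayers=dict(designFeature=True))
    #   yields a scope config with
    #   pick=dict(userRegions=[{'userDesignFeatureLayers': {'designFeature': True}}],
    #             userProjects=[{'userDesignFeatureLayers': {'designFeature': True}}]),
    #   key='designFeature',
    #   and model taken from additional_user_scope_schemas['userDesignFeatureLayers']['model']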

    class UpsertUserState(Mutation):
        """
            Abstract base class for mutation
        """
        user_state = Field(UserStateType)

        def mutate(self, info, user_state_data=None):
            """
                Update or create the user state
            :param info:
            :param user_state_data:
            :return:
            """

            # Check that all the scope instances in user_state.data exist. We permit deleted instances for now.
            new_data = R.prop_or({}, 'data', user_state_data)
            # Copy since Graphene reuses this data
            copied_new_data = copy.deepcopy(new_data)
            old_user_state_data = UserState.objects.get(
                id=user_state_data['id']
            ).data if R.prop_or(None, 'id', user_state_data) else None

            # Inspect the data and find all scope instances within UserState.data
            # This includes userRegions[*].region, userProjects[*].project and, within userRegions and userProjects,
            # userSearch.userSearchLocations[*].searchLocation and whatever the implementing libraries define
            # in addition
            updated_new_data = validate_and_mutate_scope_instances(
                user_state_scope_instances_config,
                copied_new_data
            )

            # If either userProjects or userRegions are null, it means those scope instances aren't part
            # of the update, so merge in the old values
            if R.prop_or(None, 'id', user_state_data) and R.any_satisfy(
                    lambda user_scope_key: not R.prop_or(None, user_scope_key, updated_new_data),
                    ['userProjects', 'userRegions']
            ):
                # The special update case where one userScope collection is null,
                # indicates that we are only updating one userScope object. The rest
                # should remain the same and not be removed
                for user_scope_key in ['userProjects', 'userRegions']:
                    # Database values
                    old_user_scopes_by_id = user_scope_instances_by_id(
                        user_scope_key,
                        old_user_state_data
                    )
                    # New values with updates applied
                    new_user_scopes_by_id = user_scope_instances_by_id(
                        user_scope_key,
                        updated_new_data
                    )
                    # Prefer the old over the new, merging all objects but overriding lists
                    # We override lists because a non-null list always replaces the old list in the database
                    updated_new_data[user_scope_key] = R.values(R.merge_deep(
                        old_user_scopes_by_id,
                        new_user_scopes_by_id,
                        MyMerger(
                            # pass in a list of tuples, with the
                            # strategies you are looking to apply
                            # to each type.
                            [
                                (list, ["override_non_null"]),
                                (dict, ["merge"])
                            ],
                            # next, choose the fallback strategies,
                            # applied to all other types:
                            ["override"],
                            # finally, choose the strategies in
                            # the case where the types conflict:
                            ["override"]
                        )
                    ))

            # Update user_state_data with the updated data
            modified_user_state_data = R.merge(user_state_data, dict(data=updated_new_data))

            # id or user.id can be used to identify the existing instance
            id_props = R.compact_dict(
                dict(
                    id=R.prop_or(None, 'id', modified_user_state_data),
                    user_id=R.item_str_path_or(None, 'user.id', modified_user_state_data)
                )
            )

            def fetch_and_merge(modified_user_state_data, props):
                existing = UserState.objects.filter(**props)
                # If the user doesn't have a user state yet
                if not R.length(existing):
                    return modified_user_state_data

                return merge_data_fields_on_update(
                    ['data'],
                    R.head(existing),
                    # Merge existing's id in case it wasn't in user_state_data
                    R.merge(modified_user_state_data, R.pick(['id'], existing))
                )

            modified_data = R.if_else(
                R.compose(R.length, R.keys),
                lambda props: fetch_and_merge(modified_user_state_data, props),
                lambda _: modified_user_state_data
            )(id_props)

            update_or_create_values = input_type_parameters_for_update_or_create(
                user_state_fields,
                # Make sure that all props that must be unique are unique, either by modifying values or raising an error.
                enforce_unique_props(
                    user_state_fields,
                    modified_data)
            )

            user_state, created = update_or_create_with_revision(UserState, update_or_create_values)
            return UpsertUserState(user_state=user_state)

    class CreateUserState(UpsertUserState):
        """
            Create UserState mutation class
        """

        class Arguments:
            user_state_data = type('CreateUserStateInputType', (InputObjectType,),
                                   input_type_fields(user_state_fields, CREATE, UserStateType)
                                   )(required=True)

    class UpdateUserState(UpsertUserState):
        """
            Update UserState mutation class
        """

        class Arguments:
            user_state_data = type('UpdateUserStateInputType', (InputObjectType,),
                                   input_type_fields(user_state_fields, UPDATE, UserStateType))(required=True)

    graphql_update_or_create_user_state = graphql_update_or_create(user_state_mutation_config, user_state_fields)
    graphql_query_user_states = graphql_query(UserStateType, user_state_fields, 'userStates')

    return dict(
        model_class=UserState,
        graphene_class=UserStateType,
        graphene_fields=user_state_fields,
        create_mutation_class=CreateUserState,
        update_mutation_class=UpdateUserState,
        graphql_mutation=graphql_update_or_create_user_state,
        graphql_query=graphql_query_user_states
    )
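# A minimal usage sketch (hypothetical calling app), assuming the class_config shape documented in the docstring:
#   user_state_schema = create_user_state_config(dict(
#       settings=dict(model_class=Settings, graphene_class=SettingsType, graphene_fields=settings_fields,
#                     query=SettingsQuery, mutation=SettingsMutation),
#       region=..., project=..., resource=...,
#       location=get_location_schema(),
#       user_search=get_user_search_data_schema(),
#       search_location=get_search_location_schema()
#   ))
#   UserStateType = user_state_schema['graphene_class']
#   CreateUserState = user_state_schema['create_mutation_class']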
    def test_update_fields(self):
        self.assertMatchSnapshot(
            list(R.keys(input_type_fields(user_fields, UPDATE, UserType))))
        self.assertMatchSnapshot(
            list(R.keys(input_type_fields(foo_fields, UPDATE, FooType))))

    def test_query_fields(self):
        self.assertMatchSnapshot(
            list(R.keys(allowed_read_fields(user_fields, UserType))))
        self.assertMatchSnapshot(
            list(R.keys(allowed_read_fields(foo_fields, UserType))))
def handle_can_mutate_related(model, related_model_scope_config, data,
                              validated_scope_objs_instances_and_ids):
    """
        Mutates the given related models of an instance if permitted
        See rescape-region's UserState for a working usage
    :param model: The related model
    :param related_model_scope_config: Configuration of the related model relative to the referencing instance
    :param data: The data containing the related model dicts to possibly mutate with
    :param validated_scope_objs_instances_and_ids: Config of the related objects that have been validated as
    existing in the database for objects not being created
    :return: Possibly mutates instances, returns data with newly created ids set
    """
    def make_fields_unique_if_needed(scope_obj):
        # If a field needs to be unique, like a key, call its unique_with method
        return R.map_with_obj(
            lambda key, value: R.item_str_path_or(
                R.identity, f'field_config.{key}.unique_with',
                related_model_scope_config)(scope_obj), scope_obj)

    def convert_foreign_key_to_id(scope_obj):
        # Find ForeignKey attributes and map the class field name to the foreign key id field
        # E.g. region to region_id, user to user_id, etc
        converters = R.compose(
            R.from_pairs, R.map(lambda field: [field.name, field.attname]),
            R.filter(lambda field: R.isinstance(ForeignKey, field)))(
                model._meta.fields)
        # Convert scope_obj[related_field] = {id: x} to scope_obj[related_field_id] = x
        return R.from_pairs(
            R.map_with_obj_to_values(
                lambda key, value: [converters[key],
                                    R.prop('id', value)]
                if R.has(key, converters) else [key, value], scope_obj))

    def omit_to_many(scope_obj):
        return R.omit(R.map(R.prop('attname'), model._meta.many_to_many),
                      scope_obj)

    # This indicates that scope_objs were submitted that didn't have ids
    # This is allowed if those scope_objs can be created/updated when the userState is mutated
    if R.prop_or(False, 'can_mutate_related', related_model_scope_config):
        for scope_obj_key_value in validated_scope_objs_instances_and_ids[
                'scope_objs']:

            scope_obj = scope_obj_key_value['value']
            scope_obj_path = scope_obj_key_value['key']
            if R.length(R.keys(R.omit(['id'], scope_obj))):
                modified_scope_obj = R.compose(
                    convert_foreign_key_to_id, omit_to_many,
                    make_fields_unique_if_needed)(scope_obj)
                if R.prop_or(False, 'id', scope_obj):
                    # Update, we don't need the result since it's already in user_state.data
                    instance, created = model.objects.update_or_create(
                        defaults=R.omit(['id'], modified_scope_obj),
                        **R.pick(['id'], scope_obj))
                else:
                    # Create
                    instance = model(**modified_scope_obj)
                    instance.save()
                    # We need to replace the object
                    # passed in with an object containing the id of the instance
                    data = R.fake_lens_path_set(scope_obj_path.split('.'),
                                                R.pick(['id'], instance), data)

                for to_many in model._meta.many_to_many:
                    if to_many.attname in R.keys(scope_obj):
                        # Set existing related values to the created/updated instances
                        getattr(instance, to_many.attname).set(
                            R.map(R.prop('id'), scope_obj[to_many.attname]))
    return data
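# A sketch of the create path above (hypothetical scope object), assuming can_mutate_related=True for the scope:
#   a scope_obj submitted without an id, e.g. dict(name='Downtown', region=dict(id=2)), has any ForeignKey
#   dicts converted to *_id values (region -> region_id), is saved as a new model instance, and the dict at its
#   path in data is replaced with dict(id=<new instance id>) so the caller receives the created id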