def mutate(cls, root, info, input):
    """Create a Post for the requesting user, attach its hashtags, and
    return the new post wrapped in a connection edge.

    ``input.tags`` is split on whitespace; a leading/embedded '#' is
    stripped from each token before it is linked as a Tag.

    Raises:
        GraphQLError: on any failure, with the underlying error message.
    """
    try:
        _user = info.context.user
        _post = Post(user=_user, title=input.title, content=input.content)
        if input.is_private:
            _post.is_private = input.is_private
        _post.save()
        for raw_tag in input.tags.split():
            name = raw_tag.replace('#', '')
            # get_or_create replaces the original exists()/get() pair,
            # removing the duplicate-insert race between the two queries.
            _tag, _ = Tag.objects.get_or_create(name=name)
            _tag.posts.add(_post)
        _post_edge = PostConnection.Edge(
            # NOTE(review): cursor uses count() (one past the last 0-based
            # offset); kept as-is — confirm the intended cursor semantics.
            cursor=offset_to_cursor(Post.objects.count()),
            node=_post,
        )
        return PostCreate(post_edge=_post_edge)
    except Exception as err:
        # The original passed ``err`` as GraphQLError's second positional
        # argument (``nodes``), silently dropping the message; embed it
        # in the message string instead.
        raise GraphQLError(f"PostCreate err : {err}")
def mutate_and_get_payload(cls, input, request, info):
    """Attach a characteristic (tag + value) to a LifeNode.

    Looks the tag up by slug, creating it on first use, then records the
    characteristic and returns it wrapped in a connection edge.
    """
    _, node_id = from_global_id(input.get('lifeNode'))
    life_node = LifeNode._meta.model.objects.get(document_id=node_id)

    # Bail out early if the caller is not allowed to edit this node.
    error = has_permission(cls, request, life_node, 'edit')
    if error:
        return error

    title = input.get('title').strip(' \t\n\r')
    slug = slugify(title)
    try:
        tag = TagModel.objects.get(slug=slug)
    except TagModel.DoesNotExist:
        # First time this tag is seen: persist it before use.
        tag = TagModel(
            title=title,
            slug=slug,
        )
        tag.save(request=request)

    characteristic = CharacteristicModel(
        tag=tag.document,
        lifeNode=life_node.document,
        value=input.get('value')
    )
    characteristic.save(request=request)

    edge = Characteristic.Connection.Edge(
        node=characteristic, cursor=offset_to_cursor(0))
    return CharacteristicAdd(lifeNode=life_node, characteristic=edge)
async def connection_from_gino_array_slice(
    array_slice,
    args: ConnectionArguments = None,
    slice_start: int = 0,
    array_length: int = None,
    array_slice_length: int = None,
    connection_type: Any = Connection,
    edge_type: Any = Edge,
    page_info_type: Any = PageInfo,
) -> Connection:
    """Create a connection object from a slice of the result set.

    ``array_slice`` is a Gino query; slicing it and awaiting ``.gino.all()``
    fetches only the rows needed for the requested page.  ``slice_start``,
    ``array_length`` and ``array_slice_length`` describe where the slice
    sits inside the full result set, mirroring graphql-relay's
    ``connection_from_array_slice``.
    """
    args = args or {}
    before = args.get("before")
    after = args.get("after")
    first = args.get("first")
    last = args.get("last")
    if array_slice_length is None:
        array_slice_length = len(array_slice)
    slice_end = slice_start + array_slice_length
    if array_length is None:
        array_length = slice_end
    # Clamp the requested window to both the supplied slice and the
    # overall array bounds.
    before_offset = get_offset_with_default(before, array_length)
    after_offset = get_offset_with_default(after, -1)
    start_offset = max(slice_start - 1, after_offset, -1) + 1
    end_offset = min(slice_end, before_offset, array_length)
    if isinstance(first, int):
        if first < 0:
            raise ValueError("Argument 'first' must be a non-negative integer.")
        end_offset = min(end_offset, start_offset + first)
    if isinstance(last, int):
        if last < 0:
            raise ValueError("Argument 'last' must be a non-negative integer.")
        start_offset = max(start_offset, end_offset - last)
    # If supplied slice is too large, trim it down before mapping over it.
    trimmed_slice = await array_slice[
        start_offset - slice_start : array_slice_length - (slice_end - end_offset)
    ].gino.all()
    # Cursors encode absolute offsets into the full result set.
    edges = [
        edge_type(node=value, cursor=offset_to_cursor(start_offset + index))
        for index, value in enumerate(trimmed_slice)
    ]
    first_edge_cursor = edges[0].cursor if edges else None
    last_edge_cursor = edges[-1].cursor if edges else None
    upper_bound = before_offset if before else array_length
    return connection_type(
        edges=edges,
        page_info=page_info_type(
            start_cursor=first_edge_cursor,
            end_cursor=last_edge_cursor,
            has_previous_page=start_offset > 0,
            # Only meaningful when `first` was supplied — same contract
            # as graphql-relay's array-slice helper.
            has_next_page=isinstance(first, int) and end_offset < upper_bound,
        ),
    )
def resolver(self, args, context, info, session):
    """Resolve a relay connection: filter, sort, paginate.

    Side effects: writes ``'count'`` and ``'pageInfo'`` into ``context``.
    Returns the list of instances for the requested page.
    """
    query = query_class(session)

    filters = args.get('filters')
    if filters:
        typed = filter_to_internal.cast_from(filters)
        criteria = tuple(query.criteria_from_dict(typed))
        query.filter(*criteria)
    # Count after optional filtering; computed unconditionally so an
    # unfiltered request cannot hit a NameError when `count`/`pageInfo`
    # are built below.
    filtered_count = len(query)

    order_by = args.get('orderBy')
    if order_by:
        order_by_values = OrderedDict(_parse_order_by(order_by))
        mapped = order_by_to_mapped.cast_from(order_by_values)
        # TODO only add this sort by id if there is no other sort by a
        # unique field; required in order to have stable sorting and
        # paging when sorting by a non-unique field.
        mapped['_id'] = True
        criteria = tuple(query.sort_criteria_from_dict(mapped))
        query.order_by(*criteria)

    offset = 0
    after = args.get('after')
    if after:
        # The cursor encodes the 0-based offset of the last row already
        # delivered; this page starts right after it.
        offset = int(Node.from_global_id(after)[1])
        query.offset(offset)
        offset += 1
        args.pop('after')

    first = args.get('first')
    if first:
        limit = int(first)
        query.limit(limit)

    instances = list(query.collection())

    context['count'] = filtered_count
    context['pageInfo'] = PageInfo(
        start_cursor=offset_to_cursor(query.start_index),
        end_cursor=offset_to_cursor(query.end_index),
        has_previous_page=(False if query.start_index is None
                           else query.start_index > 0),
        has_next_page=(False if query.end_index is None
                       else query.end_index < filtered_count - 1))
    return instances
def mutate_and_get_payload(cls, root, info, **data):
    """Create a Product in the given store and return it as an edge."""
    store = graphene.Node.get_node_from_global_id(info, data['store_id'])
    new_product = models.Product.objects.create(
        store=store,
        name=data['name'],
        price=data['price'],
    )
    edge = ProductEdge(cursor=offset_to_cursor(0), node=new_product)
    return CreateProductMutation(product_edge=edge)
def test__filter_casts(self):
    """Resolver should decode a relay global-id filter back to the raw pk.

    Builds a fully mocked SQLAlchemy session/query, drives the connection
    resolver with a `filters` arg containing a global id, and asserts
    (inside apply_filter) that the id was cast back to its string form.
    """
    num_instances = 4
    offset = 0
    limit = 3
    aclass_id = 6666
    aclass_global_id = Node.to_global_id('AClass', aclass_id)
    filters = {'aclass': aclass_global_id}
    order_by = None
    expected_instances = [Mock() for _ in range(num_instances)]
    # mock entities with row numbers
    mock_results = [(instance, index + 1)
                    for index, instance in enumerate(expected_instances)]
    mock_query = Mock()
    mock_session = Mock()
    mock_query.add_columns.return_value = mock_query

    def apply_filter(criteria):
        # check that AClass id has been decoded correctly
        self.assertEqual(str(aclass_id), criteria.right.value)
        return mock_query
    mock_query.filter.side_effect = apply_filter
    mock_query.count.return_value = len(mock_results)

    def apply_offset():
        # Simulate SQL OFFSET by dropping leading rows from both lists.
        del mock_results[0:offset]
        del expected_instances[0:offset]
        return mock_query
    mock_query.from_self.side_effect = apply_offset

    def apply_limit(num_results):
        # Simulate SQL LIMIT by truncating both lists in place.
        del mock_results[num_results:]
        del expected_instances[num_results:]
        return mock_query
    mock_query.limit.side_effect = apply_limit
    mock_query.order_by.return_value = mock_query
    mock_query.all.return_value = mock_results
    mock_session.query.return_value = mock_query
    context = {'request': {'session': mock_session}}
    args = {}
    # Only truthy paging/filter args are forwarded, mirroring a real
    # request (offset=0 therefore sends no 'after' cursor).
    if offset:
        args['after'] = offset_to_cursor(offset)
    if limit:
        args['first'] = limit
    if filters:
        args['filters'] = filters
    if order_by:
        args['orderBy'] = order_by
    info = TestConnection.InfoFixture(context)
    instances = areferring_connection_field.resolver(None, info, **args)
def mutate(cls, root, info, input):
    """Persist a chat message on a counsel thread and broadcast it."""
    counsel_pk = from_global_id(input.counsel_id)[1]
    counsel = Counsel.objects.get(pk=counsel_pk)
    author = info.context.user

    chat = Chat(counsel=counsel, writer=author, content=input.content)
    chat.save()

    # Notify subscribers of the new message.
    MessageSent.announce(input.counsel_id, author.id, input.content)

    edge = CounselChatConnection.Edge(
        cursor=offset_to_cursor(Chat.objects.count()), node=chat)
    return ChatSend(chat_edge=edge)
def mutate(cls, root, info, input):
    """Create a Comment on the post identified by ``input.postId``.

    Returns the new comment wrapped in a connection edge.

    Raises:
        GraphQLError: with the underlying error message on any failure.
    """
    try:
        _postId = from_global_id(input.postId)[1]
        _user = info.context.user
        _post = Post.objects.get(id=_postId)
        _comment = Comment(user=_user, post=_post, content=input.content)
        _comment.save()
        _comment_edge = CommentEdge(
            # NOTE(review): cursor uses count() (one past the last 0-based
            # offset); kept as-is — confirm intended cursor semantics.
            cursor=offset_to_cursor(Comment.objects.count()),
            node=_comment)
        print("User: {}, Comment: {}".format(_user.email, _comment.content))
        return CommentCreate(comment_edge=_comment_edge)
    except Exception as err:
        # GraphQLError expects a string message; the original passed the
        # exception object itself.  Chain the cause for server tracebacks.
        raise GraphQLError(str(err)) from err
def mutate(cls, root, info, input):
    """Create a Card, announce it to "group1", and return its edge.

    Best-effort: on any failure the error is printed and the mutation
    returns ok=False with no edge instead of raising.
    """
    try:
        new_card = Card()
        new_card.author = input.author
        new_card.save()
        edge = CardEdge(
            cursor=offset_to_cursor(Card.objects.count()), node=new_card)
        CardHasCreated.announce("group1", new_card.author, new_card)
        return NewCard(card_edge=edge, ok=True)
    except Exception as err:
        print("NewCard error : ", err)
        return NewCard(card_edge=None, ok=False)
def publish(payload, info, counsel_id):
    """Build the subscription payload for a chat message.

    Skips delivery to the message's own author.
    """
    # Don't echo the message back to whoever sent it.
    if payload["sender_id"] == info.context.user.id:
        return MessageSent.SKIP

    counsel = Counsel.objects.get(pk=from_global_id(counsel_id)[1])
    author = User.objects.get(pk=payload["sender_id"])
    # NOTE(review): the Chat instance is built but never saved here —
    # presumably persistence happens in the sending mutation; confirm.
    chat = Chat(counsel=counsel, writer=author, content=payload["content"])
    edge = CounselChatConnection.Edge(
        cursor=offset_to_cursor(Chat.objects.count()), node=chat)
    return MessageSent(chat_edge=edge)
def _apply_filter_with_mocks(self, total_instances, offset, limit, filters, order_by):
    """Drive the connection resolver against a fully mocked session.

    Returns ``(context, expected_instances, instances, mock_query,
    mock_session)`` so callers can assert on both results and the calls
    recorded on the mocks.
    """
    # NOTE(review): each filter is assumed to remove exactly one row, and
    # with no filters the mocked result set is empty — confirm intent.
    num_instances = total_instances - len(filters) if filters else 0
    expected_instances = [Mock() for _ in range(num_instances)]
    # mock entities with row numbers
    mock_results = [(instance, index + 1)
                    for index, instance in enumerate(expected_instances)]
    mock_query = Mock()
    mock_session = Mock()
    mock_query.add_columns.return_value = mock_query
    mock_query.filter.return_value = mock_query
    mock_query.count.return_value = len(mock_results)

    def apply_offset():
        # Simulate SQL OFFSET by dropping leading rows from both lists.
        del mock_results[0:offset]
        del expected_instances[0:offset]
        return mock_query
    mock_query.from_self.side_effect = apply_offset

    def apply_limit(num_results):
        # Simulate SQL LIMIT by truncating both lists in place.
        del mock_results[num_results:]
        del expected_instances[num_results:]
        return mock_query
    mock_query.limit.side_effect = apply_limit
    mock_query.order_by.return_value = mock_query
    mock_query.all.return_value = mock_results
    mock_session.query.return_value = mock_query
    context = {'request': {'session': mock_session}}
    args = {}
    # Only truthy paging/filter values are forwarded, as in a real request.
    if offset:
        args['after'] = offset_to_cursor(offset)
    if limit:
        args['first'] = limit
    if filters:
        args['filters'] = filters
    if order_by:
        args['orderBy'] = order_by
    info = TestConnection.InfoFixture(context)
    instances = aclass_connection_field.resolver(None, info, **args)
    return context, expected_instances, instances, mock_query, mock_session
def resolve_connection(cls, connection, args, iterable, max_limit=None):
    """Resolve a relay connection, supporting an extra ``offset`` argument.

    ``offset`` (1-based) is converted to an ``after`` cursor — composed
    with any supplied cursor — then standard slice-based pagination runs
    with ``max_limit`` as a hard page cap.
    """
    # Remove the offset parameter and convert it to an after cursor.
    offset = args.pop("offset", None)
    after = args.get("after")
    if offset:
        if after:
            # Offsets compose: the requested offset is relative to the
            # position right after the supplied cursor.
            offset += cursor_to_offset(after) + 1
        # input offset starts at 1 while the graphene offset starts at 0
        args["after"] = offset_to_cursor(offset - 1)
    iterable = maybe_queryset(iterable)
    if isinstance(iterable, QuerySet):
        # Let the database count rather than materializing the queryset.
        list_length = iterable.count()
    else:
        list_length = len(iterable)
    list_slice_length = (min(max_limit, list_length)
                         if max_limit is not None else list_length)
    # If after is higher than list_length, connection_from_list_slice
    # would try to do a negative slicing which makes django throw an
    # AssertionError
    after = min(get_offset_with_default(args.get("after"), -1) + 1,
                list_length)
    if max_limit is not None and "first" not in args:
        if "last" in args:
            # With only `last`, the full list must be scanned to reach
            # the tail, so the cap cannot apply.
            args["first"] = list_length
            list_slice_length = list_length
        else:
            args["first"] = max_limit
    connection = connection_from_list_slice(
        iterable[after:],
        args,
        slice_start=after,
        list_length=list_length,
        list_slice_length=list_slice_length,
        connection_type=connection,
        edge_type=connection.Edge,
        pageinfo_type=PageInfo,
    )
    connection.iterable = iterable
    connection.length = list_length
    return connection
def connection_from_iterables(edges, start_offset, has_previous_page,
                              has_next_page, connection_type, edge_type,
                              pageinfo_type):
    """Wrap pre-fetched nodes into a relay connection with offset cursors.

    Cursors are absolute offsets beginning at ``start_offset`` (treated
    as 0 when None); page-info flags are passed through unchanged.
    """
    base = 0 if start_offset is None else start_offset
    wrapped = []
    for index, node in enumerate(edges):
        wrapped.append(
            edge_type(node=node, cursor=offset_to_cursor(base + index)))

    if wrapped:
        start_cursor = wrapped[0].cursor
        end_cursor = wrapped[-1].cursor
    else:
        start_cursor = end_cursor = None

    return connection_type(
        edges=wrapped,
        page_info=pageinfo_type(
            start_cursor=start_cursor,
            end_cursor=end_cursor,
            has_previous_page=has_previous_page,
            has_next_page=has_next_page))
def mutate_and_get_payload(cls, input, context, info):
    """Create a todo for the viewer; return the viewer and the new edge."""
    viewer = get_viewer(context)
    new_todo = viewer.todos.create(text=input.get("text"))
    # The new row is appended last, so its 0-based offset is count - 1.
    edge = Todo.Connection.Edge(
        node=new_todo, cursor=offset_to_cursor(viewer.todos.count() - 1))
    return AddTodo(viewer=viewer, todoEdge=edge)
def connection_from_query(cls, query, args=None, connection_type=None,
                          edge_type=None, pageinfo_type=None):
    """
    similar to connection_from_list, but replace some of page operations
    to database limit...offset...
    it will be much faster and save more memory
    """
    connection_type = connection_type or graphene.relay.Connection
    edge_type = edge_type or connection_type.Edge
    pageinfo_type = pageinfo_type or graphene.relay.PageInfo
    args = args or {}
    before = cursor_to_offset(args.get('before', ''))
    after = cursor_to_offset(args.get('after', ''))
    first = args.get('first', None)
    last = args.get('last', None)
    offset = 0          # SQL OFFSET to apply
    limit = None        # SQL LIMIT to apply (None = unbounded)
    slice_start = None  # negative python slice, used only for a bare `last`
    if after is not None:
        # Start right after the `after` cursor.
        offset = after + 1
    if before is not None:
        # Window is bounded above by `before`.
        limit = max(before - offset, 0)
        if first is not None:
            limit = min(limit, first)
        elif last is not None:
            # Take only the last `last` rows of the bounded window.
            offset = max(before - last, offset)
            limit = max(before - offset, 0)
    else:
        if first is not None:
            limit = first
        elif last is not None:
            # No upper bound: fetch everything after `offset` and slice
            # the tail in python below.
            slice_start = -last
    if limit is not None:
        # Fetch one extra row so a following page can be detected.
        query = query.limit(limit + 1)
    query = query.offset(offset)
    query_result = list(query)
    _len = len(query_result)
    if limit is not None and _len > limit:
        # Drop the sentinel extra row.
        query_result = query_result[:-1]
    cursor_offset = offset
    if slice_start is not None:
        # Cursors must encode absolute offsets of the tail slice.
        cursor_offset = offset + _len + slice_start
    edges = [
        edge_type(node=node, cursor=offset_to_cursor(cursor_offset + i))
        for i, node in enumerate(query_result[slice_start:])
    ]
    first_edge_cursor = edges[0].cursor if edges else None
    last_edge_cursor = edges[-1].cursor if edges else None
    first_edge_offset = cursor_to_offset(first_edge_cursor)
    last_edge_offset = cursor_to_offset(last_edge_cursor)
    # Per the relay spec, these flags only need to be accurate when
    # `last` / `first` (respectively) were supplied.
    has_previous_page = bool(first_edge_offset and last and (
        first_edge_offset > 0 if after is None
        else first_edge_offset > after + 1))
    has_next_page = bool(last_edge_cursor and first and (
        _len > limit if before is None
        else last_edge_offset < before - 1))
    return connection_type(edges=edges, page_info=pageinfo_type(
        start_cursor=first_edge_cursor,
        end_cursor=last_edge_cursor,
        has_previous_page=has_previous_page,
        has_next_page=has_next_page))
def connection_from_list_slice(
    list_slice,
    args=None,
    connection_type=None,
    edge_type=None,
    pageinfo_type=None,
    slice_start=0,
    list_length=0,
    list_slice_length=None,
):
    """
    Replace graphql_relay.connection.arrayconnection.connection_from_list_slice.

    This can be removed, when (or better if)
    https://github.com/graphql-python/graphql-relay-py/issues/12 is resolved.

    Given a slice (subset) of an array, returns a connection object for use
    in GraphQL.

    This function is similar to `connectionFromArray`, but is intended for
    use cases where you know the cardinality of the connection, consider it
    too large to materialize the entire array, and instead wish pass in a
    slice of the total result large enough to cover the range specified in
    `args`.
    """
    connection_type = connection_type or Connection
    edge_type = edge_type or Edge
    pageinfo_type = pageinfo_type or PageInfo
    args = args or {}
    before = args.get("before")
    after = args.get("after")
    first = args.get("first")
    last = args.get("last")
    if list_slice_length is None:  # pragma: no cover
        list_slice_length = len(list_slice)
    slice_end = slice_start + list_slice_length
    # Clamp the requested window to the supplied slice and full list.
    before_offset = get_offset_with_default(before, list_length)
    after_offset = get_offset_with_default(after, -1)
    start_offset = max(slice_start - 1, after_offset, -1) + 1
    end_offset = min(slice_end, before_offset, list_length)
    if isinstance(first, int):
        end_offset = min(end_offset, start_offset + first)
    if isinstance(last, int):
        start_offset = max(start_offset, end_offset - last)
    # If supplied slice is too large, trim it down before mapping over it.
    # (max(..., 0) guards against a negative python-slice start; this is
    # the fix over the upstream helper referenced in the docstring.)
    _slice = list_slice[
        max(start_offset - slice_start, 0):
        list_slice_length - (slice_end - end_offset)]
    # Cursors encode absolute offsets into the full result set.
    edges = [
        edge_type(node=node, cursor=offset_to_cursor(start_offset + i))
        for i, node in enumerate(_slice)
    ]
    first_edge_cursor = edges[0].cursor if edges else None
    last_edge_cursor = edges[-1].cursor if edges else None
    return connection_type(
        edges=edges,
        page_info=pageinfo_type(
            start_cursor=first_edge_cursor,
            end_cursor=last_edge_cursor,
            has_previous_page=start_offset > 0,
            has_next_page=end_offset < list_length,
        ),
    )
def resolve_posts(self, args, context, info):
    """Resolve the posts connection for a discussion.

    Two modes, selected by the `random` argument (see comment below).
    Returns either an un-executed query (pagination applied downstream)
    or, in the random case with a `first_post`, a plain list.
    """
    discussion_id = context.matchdict['discussion_id']
    discussion = models.Discussion.get(discussion_id)
    random = args.get('random', False)
    Post = models.Post
    related = self.get_related_posts_query(True)
    # If random is True returns 10 posts, the first one is the latest post
    # created by the user, then the remaining ones are in random order.
    # If random is False, return all the posts in creation_date desc order.
    if random:
        user_id = context.authenticated_userid
        if user_id is None:
            first_post = None
        else:
            # Latest related post authored by the requesting user.
            first_post = Post.query.join(
                related, Post.id == related.c.post_id).filter(
                Post.creator_id == user_id).order_by(
                desc(Post.creation_date), Post.id).first()
        query = Post.default_db.query(Post.id).join(
            related, Post.id == related.c.post_id)
        # retrieve ids, do the random and get the posts for these ids
        post_ids = [e[0] for e in query]
        limit = args.get('first', 10)
        if first_post is not None:
            # Exclude the pinned first post from the random pool and
            # reserve one slot for it.
            first_post_id = first_post.id
            post_ids = [
                post_id for post_id in post_ids
                if post_id != first_post_id
            ]
            limit -= 1
        random_posts_ids = random_sample(
            post_ids, min(len(post_ids), limit))
        query = Post.query.filter(Post.id.in_(random_posts_ids)).options(
            joinedload(Post.creator),
        )
        # Multilingual discussions eager-load bodies differently.
        if len(discussion.discussion_locales) > 1:
            query = query.options(
                models.LangString.subqueryload_option(Post.body))
        else:
            query = query.options(
                models.LangString.joinedload_option(Post.body))
        # The query always gives the posts in the same order.
        # We need to random it again.
        posts = query.all()
        random_shuffle(posts)
        if first_post is not None:
            query = [first_post] + posts
        # NOTE(review): when random is True and first_post is None, the
        # shuffled `posts` list is discarded and the unshuffled query is
        # returned — confirm whether that is intended.
    else:
        # The related query returns a list of
        # (<PropositionPost id=2 >, None)
        # instead of <PropositionPost id=2 > when authenticated,
        # this is why we do another query here:
        query = Post.query.join(
            related, Post.id == related.c.post_id).filter(
            Post.publication_state ==
            models.PublicationStates.PUBLISHED).order_by(
            desc(Post.creation_date), Post.id).options(
            joinedload(Post.creator))
        if len(discussion.discussion_locales) > 1:
            query = query.options(
                models.LangString.subqueryload_option(Post.body))
        else:
            query = query.options(
                models.LangString.joinedload_option(Post.body))
    from_node = args.get('from_node')
    after = args.get('after')
    before = args.get('before')
    # If `from_node` is specified and after/before is None or empty string,
    # search the position of this node to set the `after` parameter
    # which is actually `arrayconnection:position` in base64.
    if from_node and not after and not before:
        post_id = int(Node.from_global_id(from_node)[-1])
        node_idx = len(
            list(
                takewhile(lambda post: post[0] != post_id,
                          query.with_entities(Post.id))))
        args['after'] = offset_to_cursor(node_idx - 1)
    # pagination is done after that, no need to do it ourself
    return query
def connection_from_list_slice(list_slice, args=None, connection_type=None,
                               edge_type=None, pageinfo_type=None,
                               slice_start=0, list_length=0,
                               list_slice_length=None,
                               connection_field=None):
    """
    Given a slice (subset) of an array, returns a connection object for use
    in GraphQL.

    This function is similar to `connectionFromArray`, but is intended for
    use cases where you know the cardinality of the connection, consider it
    too large to materialize the entire array, and instead wish pass in a
    slice of the total result large enough to cover the range specified in
    `args`.

    Additionally enforces `enforce_first_or_last` / `max_limit` taken from
    `args`, executes the (Elasticsearch-style) queryset slice, and lets the
    field's filter backends decorate the resulting connection.
    """
    connection_type = connection_type or Connection
    edge_type = edge_type or Edge
    pageinfo_type = pageinfo_type or PageInfo
    args = args or {}
    before = args.get('before')
    after = args.get('after')
    first = args.get('first')
    last = args.get('last')
    enforce_first_or_last = args.get("enforce_first_or_last")
    max_limit = args.get("max_limit")
    if enforce_first_or_last:
        assert first or last, (
            "You must provide a `first` or `last` value to properly "
            "paginate the `{}` connection.").format(connection_type)
    if max_limit:
        if first or last:
            # Cap whichever paging argument was supplied at max_limit.
            if first:
                assert first <= max_limit, (
                    "Requesting {} records on the `{}` connection exceeds "
                    "the `first` limit of {} records.").format(
                        first, connection_type, max_limit)
                first = args["first"] = min(first, max_limit)
            if last:
                assert last <= max_limit, (
                    "Requesting {} records on the `{}` connection exceeds "
                    "the `last` limit of {} records.").format(
                        last, connection_type, max_limit)
                last = args["last"] = min(last, max_limit)
        else:
            # No explicit paging: default the page size to the cap.
            first = max_limit
    if list_slice_length is None:
        list_slice_length = len(list_slice)
    slice_end = slice_start + list_slice_length
    # Clamp the requested window to the supplied slice and full list.
    before_offset = get_offset_with_default(before, list_length)
    after_offset = get_offset_with_default(after, -1)
    start_offset = max(slice_start - 1, after_offset, -1) + 1
    end_offset = min(slice_end, before_offset, list_length)
    if isinstance(first, int):
        end_offset = min(end_offset, start_offset + first)
    if isinstance(last, int):
        start_offset = max(start_offset, end_offset - last)
    # If supplied slice is too large, trim it down before mapping over it.
    _slice_qs = list_slice[
        max(start_offset - slice_start, 0):
        list_slice_length - (slice_end - end_offset)]
    logger.debug_json(_slice_qs.to_dict())
    # Execute the sliced queryset against the search backend.
    _slice = _slice_qs.execute()
    # Cursors encode absolute offsets into the full result set.
    edges = [
        edge_type(node=node, cursor=offset_to_cursor(start_offset + i))
        for i, node in enumerate(_slice)
    ]
    first_edge_cursor = edges[0].cursor if edges else None
    last_edge_cursor = edges[-1].cursor if edges else None
    lower_bound = after_offset + 1 if after else 0
    upper_bound = before_offset if before else list_length
    conn = connection_type(
        edges=edges,
        page_info=pageinfo_type(
            start_cursor=first_edge_cursor,
            end_cursor=last_edge_cursor,
            has_previous_page=(isinstance(last, int)
                               and start_offset > lower_bound),
            has_next_page=isinstance(first, int)
            and end_offset < upper_bound),
    )
    # This is certainly something to consider to change. Although there are
    # some original `graphene` code parts that do alter the connection
    # object directly, in principle, we shouldn't apply anything in this
    # way, but rather include all the things by overriding appropriate
    # parts, especially the connection class. However, at the moment of
    # writing, due to, perhaps, too little investigation on how to do it
    # properly with `graphene`, this seems to be the most simple and
    # appropriate solution.
    for backend_cls in connection_field.filter_backends:
        if backend_cls.has_connection_fields:
            backend = backend_cls(connection_field)
            backend.alter_connection(conn, _slice)
    return conn
def resolve_connection(cls, connection_type, model, info, args, resolved):
    """Resolve a connection, translating an `offset` arg into a cursor.

    Converts `offset` to the cursor of the preceding position so that
    pagination starts at `offset`; an explicitly supplied `after` cursor
    takes precedence (setdefault never overwrites it).
    """
    requested_offset = args.get('offset')
    if requested_offset is not None:
        args.setdefault('after', offset_to_cursor(requested_offset - 1))
    return super().resolve_connection(
        connection_type, model, info, args, resolved)