Commit c5468d3: pagination-has-more
benawad committed Aug 14, 2020 · 1 parent 2faad7d
Showing 5 changed files with 76 additions and 31 deletions.
24 changes: 20 additions & 4 deletions server/src/resolvers/post.ts
@@ -10,6 +10,7 @@ import {
Int,
FieldResolver,
Root,
ObjectType,
} from "type-graphql";
import { Post } from "../entities/Post";
import { MyContext } from "../types";
@@ -24,32 +25,47 @@ class PostInput {
text: string;
}

@ObjectType()
class PaginatedPosts {
@Field(() => [Post])
posts: Post[];
@Field()
hasMore: boolean;
}

@Resolver(Post)
export class PostResolver {
@FieldResolver(() => String)
textSnippet(@Root() post: Post) {
return post.text.slice(0, 50);
}

@Query(() => [Post])
@Query(() => PaginatedPosts)
async posts(
@Arg("limit", () => Int) limit: number,
@Arg("cursor", () => String, { nullable: true }) cursor: string | null
): Promise<Post[]> {
): Promise<PaginatedPosts> {
// fetch one extra row (e.g. 21 when the client asked for 20) so we can tell whether more posts exist
const realLimit = Math.min(50, limit);
const realLimitPlusOne = realLimit + 1;
const qb = getConnection()
.getRepository(Post)
.createQueryBuilder("p")
.orderBy('"createdAt"', "DESC")
.take(realLimit);
.take(realLimitPlusOne);

if (cursor) {
qb.where('"createdAt" < :cursor', {
cursor: new Date(parseInt(cursor)),
});
}

return qb.getMany();
const posts = await qb.getMany();

return {
posts: posts.slice(0, realLimit),
hasMore: posts.length === realLimitPlusOne,
};
}

@Query(() => Post, { nullable: true })
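For readers skimming the diff, here is a minimal, self-contained sketch (not part of the commit; names are illustrative) of the fetch-one-extra-row trick the resolver now uses: ask the database for limit + 1 rows, return only limit of them, and report hasMore when the extra row came back.

function paginate<T>(rows: T[], limit: number): { items: T[]; hasMore: boolean } {
  // rows are assumed to have been fetched with take(limit + 1)
  return {
    items: rows.slice(0, limit),
    hasMore: rows.length === limit + 1,
  };
}

// 21 rows for a requested limit of 20 -> one full page plus hasMore === true
paginate(Array.from({ length: 21 }, (_, i) => i), 20);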
35 changes: 24 additions & 11 deletions web/src/generated/graphql.tsx
@@ -15,7 +15,7 @@ export type Scalars = {
export type Query = {
__typename?: 'Query';
hello: Scalars['String'];
posts: Array<Post>;
posts: PaginatedPosts;
post?: Maybe<Post>;
me?: Maybe<User>;
};
@@ -31,6 +31,12 @@ export type QueryPostArgs = {
id: Scalars['Float'];
};

export type PaginatedPosts = {
__typename?: 'PaginatedPosts';
posts: Array<Post>;
hasMore: Scalars['Boolean'];
};

export type Post = {
__typename?: 'Post';
id: Scalars['Float'];
@@ -237,10 +243,14 @@ export type PostsQueryVariables = Exact<{

export type PostsQuery = (
{ __typename?: 'Query' }
& { posts: Array<(
{ __typename?: 'Post' }
& Pick<Post, 'id' | 'createdAt' | 'updatedAt' | 'title' | 'textSnippet'>
)> }
& { posts: (
{ __typename?: 'PaginatedPosts' }
& Pick<PaginatedPosts, 'hasMore'>
& { posts: Array<(
{ __typename?: 'Post' }
& Pick<Post, 'id' | 'createdAt' | 'updatedAt' | 'title' | 'textSnippet'>
)> }
) }
);

export const RegularErrorFragmentDoc = gql`
@@ -347,12 +357,15 @@ export function useMeQuery(options: Omit<Urql.UseQueryArgs<MeQueryVariables>, 'q
};
export const PostsDocument = gql`
query Posts($limit: Int!, $cursor: String) {
posts(cursor: $cursor, limit: $limit) {
id
createdAt
updatedAt
title
textSnippet
posts(limit: $limit, cursor: $cursor) {
hasMore
posts {
id
createdAt
updatedAt
title
textSnippet
}
}
}
`;
15 changes: 9 additions & 6 deletions web/src/graphql/queries/posts.graphql
@@ -1,9 +1,12 @@
query Posts($limit: Int!, $cursor: String) {
posts(cursor: $cursor, limit: $limit) {
id
createdAt
updatedAt
title
textSnippet
posts(limit: $limit, cursor: $cursor) {
hasMore
posts {
id
createdAt
updatedAt
title
textSnippet
}
}
}
10 changes: 4 additions & 6 deletions web/src/pages/index.tsx
@@ -8,12 +8,10 @@ import { useState } from "react";

const Index = () => {
const [variables, setVariables] = useState({
limit: 10,
limit: 33,
cursor: null as null | string,
});

console.log(variables);

const [{ data, fetching }] = usePostsQuery({
variables,
});
@@ -35,21 +33,21 @@ const Index = () => {
<div>loading...</div>
) : (
<Stack spacing={8}>
{data!.posts.map((p) => (
{data!.posts.posts.map((p) => (
<Box key={p.id} p={5} shadow="md" borderWidth="1px">
<Heading fontSize="xl">{p.title}</Heading>
<Text mt={4}>{p.textSnippet}</Text>
</Box>
))}
</Stack>
)}
{data ? (
{data && data.posts.hasMore ? (
<Flex>
<Button
onClick={() => {
setVariables({
limit: variables.limit,
cursor: data.posts[data.posts.length - 1].createdAt,
cursor: data.posts.posts[data.posts.posts.length - 1].createdAt,
});
}}
isLoading={fetching}
23 changes: 19 additions & 4 deletions web/src/utils/createUrqlClient.ts
@@ -15,7 +15,6 @@ import {
} from "../generated/graphql";
import { betterUpdateQuery } from "./betterUpdateQuery";
import Router from "next/router";
import { FieldsOnCorrectTypeRule } from "graphql";

const errorExchange: Exchange = ({ forward }) => (ops$) => {
return pipe(
@@ -40,15 +39,28 @@ const cursorPagination = (): Resolver => {
}

const fieldKey = `${fieldName}(${stringifyVariables(fieldArgs)})`;
const isItInTheCache = cache.resolveFieldByKey(entityKey, fieldKey);
const isItInTheCache = cache.resolve(
cache.resolveFieldByKey(entityKey, fieldKey) as string,
"posts"
);
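// If the requested page is not in the cache yet, mark the result as partial
// so urql still goes to the network while returning what it already has
// (my reading of Graphcache's partial-results behavior, not stated in the commit).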
info.partial = !isItInTheCache;
let hasMore = true;
const results: string[] = [];
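// Merge every cached page for this field: collect the post keys from each page
// and keep hasMore true only while every cached page still reports more results.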
fieldInfos.forEach((fi) => {
const data = cache.resolveFieldByKey(entityKey, fi.fieldKey) as string[];
const key = cache.resolveFieldByKey(entityKey, fi.fieldKey) as string;
const data = cache.resolve(key, "posts") as string[];
const _hasMore = cache.resolve(key, "hasMore");
if (!_hasMore) {
hasMore = _hasMore as boolean;
}
results.push(...data);
});

return results;
return {
__typename: "PaginatedPosts",
hasMore,
posts: results,
};

// const visited = new Set();
// let result: NullArray<string> = [];
@@ -112,6 +124,9 @@ export const createUrqlClient = (ssrExchange: any) => ({
exchanges: [
dedupExchange,
cacheExchange({
keys: {
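// PaginatedPosts has no id field, so Graphcache cannot normalize it; a null
// key tells the cache to embed it under its parent Query.posts field instead
// (my reading of urql Graphcache keys, not spelled out in this commit).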
PaginatedPosts: () => null,
},
resolvers: {
Query: {
posts: cursorPagination(),
