diff --git a/.api-reports/api-report-utilities_internal.api.md b/.api-reports/api-report-utilities_internal.api.md index d1055bf60ca..790b197514d 100644 --- a/.api-reports/api-report-utilities_internal.api.md +++ b/.api-reports/api-report-utilities_internal.api.md @@ -100,9 +100,20 @@ export type DecoratedPromise = PendingPromise | FulfilledPromise export function decoratePromise(promise: Promise): DecoratedPromise; // @internal @deprecated (undocumented) -export class DeepMerger { +export namespace DeepMerger { + // (undocumented) + export type ArrayMergeStrategy = "truncate" | "combine"; + // (undocumented) + export interface Options { + // (undocumented) + arrayMerge?: DeepMerger.ArrayMergeStrategy; + } +} + +// @internal @deprecated (undocumented) +export class DeepMerger { // Warning: (ae-forgotten-export) The symbol "ReconcilerFunction" needs to be exported by the entry point index.d.ts - constructor(reconciler?: ReconcilerFunction); + constructor(reconciler?: ReconcilerFunction, options?: DeepMerger.Options); // (undocumented) isObject: typeof isNonNullObject; // (undocumented) diff --git a/.changeset/cold-kiwis-give.md b/.changeset/cold-kiwis-give.md new file mode 100644 index 00000000000..880998840fe --- /dev/null +++ b/.changeset/cold-kiwis-give.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": minor +--- + +Fix an issue where deferred payloads that returned arrays with fewer items than the original cached array would retain items from the cached array. This change also applies to `@stream` arrays, where streamed arrays replace the cached arrays. diff --git a/.changeset/neat-lemons-shave.md b/.changeset/neat-lemons-shave.md new file mode 100644 index 00000000000..d7357691800 --- /dev/null +++ b/.changeset/neat-lemons-shave.md @@ -0,0 +1,5 @@ +--- +"@apollo/client": patch +--- + +Improve the cache data loss warning message when `existing` or `incoming` is an array. 
diff --git a/.size-limits.json b/.size-limits.json index e4f01b44776..97c94ef5038 100644 --- a/.size-limits.json +++ b/.size-limits.json @@ -1,6 +1,6 @@ { - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44194, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39041, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33526, - "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27519 + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (CJS)": 44386, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production) (CJS)": 39203, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\"": 33554, + "import { ApolloClient, InMemoryCache, HttpLink } from \"@apollo/client\" (production)": 27582 } diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index b44b6eb02f6..d852dc06248 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -894,8 +894,8 @@ For more information about these options, please refer to the documentation: " have an ID or a custom merge function, or " : "", typeDotName, - { ...existing }, - { ...incoming } + Array.isArray(existing) ? [...existing] : { ...existing }, + Array.isArray(incoming) ? 
[...incoming] : { ...incoming } ); } diff --git a/src/core/__tests__/client.watchQuery/defer20220824.test.ts b/src/core/__tests__/client.watchQuery/defer20220824.test.ts index 36c6ba5b8bb..d8934c96ea5 100644 --- a/src/core/__tests__/client.watchQuery/defer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/defer20220824.test.ts @@ -6,6 +6,7 @@ import { InMemoryCache } from "@apollo/client/cache"; import { Defer20220824Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { + markAsStreaming, mockDefer20220824, ObservableStream, } from "@apollo/client/testing/internal"; @@ -163,3 +164,186 @@ test("deduplicates queries as long as a query still has deferred chunks", async // expect(query5).not.toEmitAnything(); expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); }); + +it.each([["cache-first"], ["no-cache"]] as const)( + "correctly merges deleted rows when receiving a deferred payload", + async (fetchPolicy) => { + const query = gql` + query Characters { + characters { + id + uppercase + ... 
@defer { + lowercase + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDefer20220824(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new Defer20220824Handler(), + }); + + const observable = client.watchQuery({ query, fetchPolicy }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }, + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [{ data: { lowercase: "a" }, path: ["characters", 0] }], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "b" }, path: ["characters", 1] }, + { data: { lowercase: "c" }, path: ["characters", 2] }, + ], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" 
}, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + void observable.refetch(); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.refetch, + partial: false, + }); + + // on refetch, the list is shorter + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ], + }, + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: + // no-cache fetch policy doesn't merge with existing cache data, so + // the lowercase field is not added to each item + fetchPolicy === "no-cache" ? 
+ [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ] + : [ + { + __typename: "Character", + id: 1, + uppercase: "A", + lowercase: "a", + }, + { + __typename: "Character", + id: 2, + uppercase: "B", + lowercase: "b", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "a" }, path: ["characters", 0] }, + { data: { lowercase: "b" }, path: ["characters", 1] }, + ], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); + } +); diff --git a/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts index 035ce0525df..5464258a417 100644 --- a/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/deferGraphQL17Alpha9.test.ts @@ -6,6 +6,7 @@ import { InMemoryCache } from "@apollo/client/cache"; import { GraphQL17Alpha9Handler } from "@apollo/client/incremental"; import { ApolloLink } from "@apollo/client/link"; import { + markAsStreaming, mockDeferStreamGraphQL17Alpha9, ObservableStream, } from "@apollo/client/testing/internal"; @@ -173,3 +174,198 @@ test("deduplicates queries as long as a query still has deferred chunks", async // expect(query5).not.toEmitAnything(); expect(outgoingRequestSpy).toHaveBeenCalledTimes(2); }); + +it.each([["cache-first"], ["no-cache"]] as const)( + "correctly merges deleted rows when receiving a deferred payload", + async (fetchPolicy) => { + const query = gql` + query Characters 
{ + characters { + id + uppercase + ... @defer { + lowercase + } + } + } + `; + + const { httpLink, enqueueInitialChunk, enqueueSubsequentChunk } = + mockDeferStreamGraphQL17Alpha9(); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: httpLink, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const observable = client.watchQuery({ query, fetchPolicy }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitTypedValue({ + data: undefined, + dataState: "empty", + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }, + pending: [ + { id: "0", path: ["characters", 0] }, + { id: "1", path: ["characters", 1] }, + { id: "2", path: ["characters", 2] }, + ], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [{ data: { lowercase: "a" }, id: "0" }], + completed: [{ id: "0" }], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B" }, + { __typename: "Character", id: 3, uppercase: "C" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "b" }, id: "1" }, + { data: { lowercase: "c" }, id: 
"2" }, + ], + completed: [{ id: "1" }, { id: "2" }], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + void observable.refetch(); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + { __typename: "Character", id: 3, uppercase: "C", lowercase: "c" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.refetch, + partial: false, + }); + + // on refetch, the list is shorter + enqueueInitialChunk({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ], + }, + pending: [ + { id: "0", path: ["characters", 0] }, + { id: "1", path: ["characters", 1] }, + ], + hasNext: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + characters: + // no-cache fetch policy doesn't merge with existing cache data, so + // the lowercase field is not available in the refetch + fetchPolicy === "no-cache" ? 
+ [ + { __typename: "Character", id: 1, uppercase: "A" }, + { __typename: "Character", id: 2, uppercase: "B" }, + ] + : [ + { + __typename: "Character", + id: 1, + uppercase: "A", + lowercase: "a", + }, + { + __typename: "Character", + id: 2, + uppercase: "B", + lowercase: "b", + }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + enqueueSubsequentChunk({ + incremental: [ + { data: { lowercase: "a" }, id: "0" }, + { data: { lowercase: "b" }, id: "1" }, + ], + completed: [{ id: "0" }, { id: "1" }], + hasNext: false, + }); + + await expect(stream).toEmitTypedValue({ + data: { + characters: [ + { __typename: "Character", id: 1, uppercase: "A", lowercase: "a" }, + { __typename: "Character", id: 2, uppercase: "B", lowercase: "b" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); + } +); diff --git a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts index 01ab8f1f78a..8216613d93a 100644 --- a/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts +++ b/src/core/__tests__/client.watchQuery/streamDefer20220824.test.ts @@ -17,6 +17,7 @@ import { ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; +import { hasDirectives } from "@apollo/client/utilities/internal"; const friends = [ { name: "Luke", id: 1 }, @@ -879,3 +880,110 @@ test("handles @defer inside @stream", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("can use custom merge function to combine cached and streamed lists", async () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (existing = [], incoming, { field }) => { + if (field && hasDirectives(["stream"], field)) { + const merged: any[] = []; + + for ( + let i = 0; + i 
< Math.max(existing.length, incoming.length); + i++ + ) { + merged[i] = + incoming[i] === undefined ? existing[i] : incoming[i]; + } + + return merged; + } + + return incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => friends.map((friend) => Promise.resolve(friend)), + }), + cache, + incrementalHandler: new Defer20220824Handler(), + }); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + const stream = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "cache-and-network" }) + ); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + partial: false, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }), + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); 
+}); diff --git a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts index bdcd108a54e..fe33f498d34 100644 --- a/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts +++ b/src/core/__tests__/client.watchQuery/streamGraphQL17Alpha9.test.ts @@ -17,6 +17,7 @@ import { ObservableStream, promiseWithResolvers, } from "@apollo/client/testing/internal"; +import { hasDirectives } from "@apollo/client/utilities/internal"; const friends = [ { name: "Luke", id: 1 }, @@ -792,7 +793,7 @@ test("handles @defer inside @stream", async () => { const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion, - } = promiseWithResolvers(); + } = promiseWithResolvers(); const client = new ApolloClient({ link: createLink({ @@ -841,7 +842,7 @@ test("handles @defer inside @stream", async () => { partial: true, }); - resolveIterableCompletion(null); + resolveIterableCompletion(); await expect(observableStream).toEmitSimilarValue({ expected: (previous) => ({ @@ -883,3 +884,124 @@ test("handles @defer inside @stream", async () => { await expect(observableStream).not.toEmitAnything(); }); + +test("can use custom merge function to combine cached and streamed lists", async () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (existing = [], incoming, { field }) => { + if (field && hasDirectives(["stream"], field)) { + const merged: any[] = []; + + for ( + let i = 0; + i < Math.max(existing.length, incoming.length); + i++ + ) { + merged[i] = + incoming[i] === undefined ? 
existing[i] : incoming[i]; + } + + return merged; + } + + return incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + link: createLink({ + friendList: () => friends.map((friend) => Promise.resolve(friend)), + }), + cache, + incrementalHandler: new GraphQL17Alpha9Handler(), + }); + + const query = gql` + query { + friendList @stream(initialCount: 1) { + id + name + } + } + `; + + client.writeQuery({ + query, + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + }); + + const stream = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "cache-and-network" }) + ); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Cached Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }, + dataState: "complete", + loading: true, + networkStatus: NetworkStatus.loading, + partial: false, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Cached Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: markAsStreaming({ + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: "Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Cached Leia" }, + ], + }), + dataState: "streaming", + loading: true, + networkStatus: NetworkStatus.streaming, + partial: true, + }); + + await expect(stream).toEmitTypedValue({ + data: { + friendList: [ + { __typename: "Friend", id: "1", name: "Luke" }, + { __typename: 
"Friend", id: "2", name: "Han" }, + { __typename: "Friend", id: "3", name: "Leia" }, + ], + }, + dataState: "complete", + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + await expect(stream).not.toEmitAnything(); +}); diff --git a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts index 13c09126dc9..3f2cd4e4fed 100644 --- a/src/incremental/handlers/__tests__/defer20220824/stream.test.ts +++ b/src/incremental/handlers/__tests__/defer20220824/stream.test.ts @@ -1586,7 +1586,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, }); @@ -1604,7 +1603,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, chunk @@ -1654,7 +1652,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, }); @@ -1672,7 +1669,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, chunk @@ -1792,8 +1788,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -1811,8 +1805,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, chunk @@ -1823,7 +1815,6 @@ test("properly merges streamed data into list with more items", async () => { { name: "Luke", id: "1" }, 
{ name: "Han", id: "2" }, { name: "Leia", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -1909,7 +1900,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + nestedFriendList: [], }, }, }); @@ -1925,7 +1916,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, }); @@ -1942,7 +1933,7 @@ test("properly merges cache data when list is included in deferred chunk", async { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, chunk diff --git a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts index 7b00b258c9f..bded4641629 100644 --- a/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts +++ b/src/incremental/handlers/__tests__/graphql17Alpha9/stream.test.ts @@ -2484,7 +2484,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, }); @@ -2502,7 +2501,6 @@ test("properly merges streamed data into cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, ], }, chunk @@ -2554,7 +2552,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, }); @@ -2572,7 +2569,6 @@ test("properly merges streamed data into partial cache data", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { id: "3" }, ], }, chunk @@ 
-2697,8 +2693,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -2716,8 +2710,6 @@ test("properly merges streamed data into list with more items", async () => { friendList: [ { name: "Luke", id: "1" }, { name: "Han", id: "2" }, - { name: "Leia Cached", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, chunk @@ -2728,7 +2720,6 @@ test("properly merges streamed data into list with more items", async () => { { name: "Luke", id: "1" }, { name: "Han", id: "2" }, { name: "Leia", id: "3" }, - { name: "Chewbacca Cached", id: "4" }, ], }, }); @@ -2814,7 +2805,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke Cached" }, { name: "Han Cached" }], + nestedFriendList: [], }, }, }); @@ -2830,7 +2821,7 @@ test("properly merges cache data when list is included in deferred chunk", async data: { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, }); @@ -2847,7 +2838,7 @@ test("properly merges cache data when list is included in deferred chunk", async { nestedObject: { scalarField: "slow", - nestedFriendList: [{ name: "Luke" }, { name: "Han Cached" }], + nestedFriendList: [{ name: "Luke" }], }, }, chunk diff --git a/src/incremental/handlers/defer20220824.ts b/src/incremental/handlers/defer20220824.ts index 27ce3d3c96d..13a495f4ab6 100644 --- a/src/incremental/handlers/defer20220824.ts +++ b/src/incremental/handlers/defer20220824.ts @@ -75,9 +75,15 @@ class DeferRequest> private extensions: Record = {}; private data: any = {}; - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + arrayMerge: DeepMerger.ArrayMergeStrategy = 
"truncate" + ) { if (normalized.data !== undefined) { - this.data = new DeepMerger().merge(this.data, normalized.data); + this.data = new DeepMerger(undefined, { arrayMerge }).merge( + this.data, + normalized.data + ); } if (normalized.errors) { this.errors.push(...normalized.errors); @@ -98,6 +104,7 @@ class DeferRequest> if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { const { path, errors, extensions } = incremental; + let arrayMerge: DeepMerger.ArrayMergeStrategy = "truncate"; let data = // The item merged from a `@stream` chunk is always the first item in // the `items` array @@ -112,15 +119,21 @@ class DeferRequest> const key = path[i]; const isNumericKey = !isNaN(+key); const parent: Record = isNumericKey ? [] : {}; + if (isNumericKey) { + arrayMerge = "combine"; + } parent[key] = data; data = parent as typeof data; } } - this.merge({ - errors, - extensions, - data: data ? (data as TData) : undefined, - }); + this.merge( + { + errors, + extensions, + data: data ? (data as TData) : undefined, + }, + arrayMerge + ); } } diff --git a/src/incremental/handlers/graphql17Alpha9.ts b/src/incremental/handlers/graphql17Alpha9.ts index 2355ba10b05..51ea4acc714 100644 --- a/src/incremental/handlers/graphql17Alpha9.ts +++ b/src/incremental/handlers/graphql17Alpha9.ts @@ -119,7 +119,7 @@ class IncrementalRequest } } - this.merge(chunk); + this.merge(chunk, "truncate"); if (hasIncrementalChunks(chunk)) { for (const incremental of chunk.incremental) { @@ -133,6 +133,7 @@ class IncrementalRequest const path = pending.path.concat(incremental.subPath ?? []); let data: any; + let arrayMerge: DeepMerger.ArrayMergeStrategy = "truncate"; if ("items" in incremental) { const items = incremental.items as any[]; const parent: any[] = []; @@ -178,14 +179,20 @@ class IncrementalRequest const parent: Record = typeof key === "number" ? 
[] : {}; parent[key] = data; + if (typeof key === "number") { + arrayMerge = "combine"; + } data = parent; } - this.merge({ - data, - extensions: incremental.extensions, - errors: incremental.errors, - }); + this.merge( + { + data, + extensions: incremental.extensions, + errors: incremental.errors, + }, + arrayMerge + ); } } @@ -212,9 +219,15 @@ class IncrementalRequest return result; } - private merge(normalized: FormattedExecutionResult) { + private merge( + normalized: FormattedExecutionResult, + arrayMerge: DeepMerger.ArrayMergeStrategy + ) { if (normalized.data !== undefined) { - this.data = new DeepMerger().merge(this.data, normalized.data); + this.data = new DeepMerger(undefined, { arrayMerge }).merge( + this.data, + normalized.data + ); } if (normalized.errors) { diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx index b34b85aa6c6..bff598f1047 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamDefer20220824.test.tsx @@ -125,7 +125,19 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }); cache.writeQuery({ query, data: { @@ -177,11 +189,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ 
-200,7 +208,6 @@ test('does not suspend deferred queries with data in the cache and using a "cach friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, dataState: "streaming", @@ -251,13 +258,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); interface QueryData { friendList: Array<{ __typename: "Friend"; id: string; name: string }>; @@ -274,14 +275,26 @@ test('does not suspend deferred queries with partial data in the cache and using const client = new ApolloClient({ link: createLink({ friendList: () => stream }), - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }), incrementalHandler: new Defer20220824Handler(), }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -336,13 +349,7 @@ test('does not suspend deferred queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - // @ts-expect-error - { __typename: "Friend", id: "2" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -361,8 +368,6 @@ test('does not suspend deferred queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, ], }, dataState: "streaming", diff --git a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx index fd15889a9ad..e5010152249 100644 --- a/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery/streamGraphQL17Alpha9.test.tsx @@ -125,7 +125,19 @@ test('does not suspend deferred queries with data in the cache and using a "cach } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }); cache.writeQuery({ query, data: { @@ -177,11 +189,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { 
__typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -200,7 +208,6 @@ test('does not suspend deferred queries with data in the cache and using a "cach friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, dataState: "streaming", @@ -251,13 +258,7 @@ test('does not suspend deferred queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); interface QueryData { friendList: Array<{ __typename: "Friend"; id: string; name: string }>; @@ -274,14 +275,26 @@ test('does not suspend deferred queries with partial data in the cache and using const client = new ApolloClient({ link: createLink({ friendList: () => stream }), - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => { + return incoming; + }, + }, + }, + }, + }, + }), incrementalHandler: new GraphQL17Alpha9Handler(), }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -336,13 +349,7 @@ test('does not suspend deferred queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useReadQuery"]); expect(snapshot).toStrictEqualTyped({ data: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - // @ts-expect-error - { __typename: "Friend", id: "2" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, dataState: "streaming", error: undefined, @@ -361,8 +368,6 @@ test('does not suspend deferred queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - // @ts-expect-error - { __typename: "Friend", id: "3" }, ], }, dataState: "streaming", diff --git a/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx index 6432ce1d093..aad6b5a7618 100644 --- a/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useQuery/streamDefer20220824.test.tsx @@ -491,7 +491,17 @@ test('returns eventually consistent data from streamed queries with data in the `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new Defer20220824Handler(), }); @@ -532,11 +542,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: 
"Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -558,18 +564,13 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -592,7 +593,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -613,7 +613,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -622,13 +621,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. 
test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -640,7 +633,17 @@ test('returns eventually consistent data from streamed queries with partial data `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new Defer20220824Handler(), }); @@ -648,7 +651,7 @@ test('returns eventually consistent data from streamed queries with partial data // We know we are writing partial data to the cache so suppress the console // warning. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -692,11 +695,7 @@ test('returns eventually consistent data from streamed queries with partial data await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -718,18 +717,13 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], 
+ friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -752,7 +746,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, @@ -773,7 +766,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, diff --git a/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx index ab0e022c798..28a65e677f0 100644 --- a/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useQuery/streamGraphQL17Alpha9.test.tsx @@ -491,7 +491,17 @@ test('returns eventually consistent data from streamed queries with data in the `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -532,11 +542,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -558,18 +564,13 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: 
"1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -592,7 +593,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -613,7 +613,6 @@ test('returns eventually consistent data from streamed queries with data in the friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, ], }, variables: {}, @@ -622,13 +621,7 @@ test('returns eventually consistent data from streamed queries with data in the await expect(takeSnapshot).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. 
test('returns eventually consistent data from streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { stream, subject } = asyncIterableSubject(); const query = gql` query { @@ -640,7 +633,17 @@ test('returns eventually consistent data from streamed queries with partial data `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -648,7 +651,7 @@ test('returns eventually consistent data from streamed queries with partial data // We know we are writing partial data to the cache so suppress the console // warning. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); client.writeQuery({ query, data: { @@ -692,11 +695,7 @@ test('returns eventually consistent data from streamed queries with partial data await expect(takeSnapshot()).resolves.toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", loading: true, @@ -718,18 +717,13 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", loading: true, networkStatus: NetworkStatus.streaming, previousData: { - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - 
], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }, variables: {}, }); @@ -752,7 +746,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, @@ -773,7 +766,6 @@ test('returns eventually consistent data from streamed queries with partial data friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }, variables: {}, diff --git a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx index 0d2d441d4c5..4468bb2a2d2 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamDefer20220824.test.tsx @@ -368,13 +368,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. 
test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); const query = gql` @@ -386,12 +380,22 @@ test('does not suspend streamed queries with partial data in the cache and using } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. { - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); cache.writeQuery({ query, data: { @@ -446,11 +450,7 @@ test('does not suspend streamed queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -469,7 +469,6 @@ test('does not suspend streamed queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", @@ -514,7 +513,17 @@ test('does not suspend streamed queries with data in the cache and using a "cach `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream 
}), incrementalHandler: new Defer20220824Handler(), }); @@ -562,11 +571,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -689,11 +694,7 @@ test("incrementally rerenders data returned by a `refetch` for a streamed query" expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke (refetch)" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1615,11 +1616,7 @@ test("can refetch and respond to cache updates after encountering an error in an expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1638,7 +1635,6 @@ test("can refetch and respond to cache updates after encountering an error in an friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, ], }), dataState: "streaming", diff --git 
a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx index 8eafae1220b..791d712bd25 100644 --- a/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery/streamGraphQL17Alpha9.test.tsx @@ -368,13 +368,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach await expect(takeRender).not.toRerender(); }); -// TODO: Determine how we handle partial data with streamed responses. While this -// works as expected and renders correctly, this also emits missing field -// warnings in the console when writing the result to the cache since array items -// with partial cache data are still included for items that haven't streamed in -// yet. test('does not suspend streamed queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { - using _TODO_REMOVE_ME_AFTER_DECIDING_COMMENT = spyOnConsole("error"); const { subject, stream } = asyncIterableSubject(); const query = gql` @@ -386,12 +380,22 @@ test('does not suspend streamed queries with partial data in the cache and using } `; - const cache = new InMemoryCache(); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }); // We are intentionally writing partial data to the cache. Supress console // warnings to avoid unnecessary noise in the test. 
{ - // using _consoleSpy = spyOnConsole("error"); + using _consoleSpy = spyOnConsole("error"); cache.writeQuery({ query, data: { @@ -446,11 +450,7 @@ test('does not suspend streamed queries with partial data in the cache and using expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2" }, - { __typename: "Friend", id: "3" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -469,7 +469,6 @@ test('does not suspend streamed queries with partial data in the cache and using friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3" }, ], }), dataState: "streaming", @@ -514,7 +513,17 @@ test('does not suspend streamed queries with data in the cache and using a "cach `; const client = new ApolloClient({ - cache: new InMemoryCache(), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + friendList: { + merge: (_, incoming) => incoming, + }, + }, + }, + }, + }), link: createLink({ friendList: () => stream }), incrementalHandler: new GraphQL17Alpha9Handler(), }); @@ -562,11 +571,7 @@ test('does not suspend streamed queries with data in the cache and using a "cach expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - { __typename: "Friend", id: "2", name: "Cached Han" }, - { __typename: "Friend", id: "3", name: "Cached Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -689,11 +694,7 @@ test("incrementally rerenders data returned by a `refetch` for a streamed query" 
expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke (refetch)" }, - { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke (refetch)" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1643,11 +1644,7 @@ test("can refetch and respond to cache updates after encountering an error in an expect(renderedComponents).toStrictEqual(["useSuspenseQuery"]); expect(snapshot).toStrictEqualTyped({ data: markAsStreaming({ - friendList: [ - { __typename: "Friend", id: "1", name: "Luke" }, - null, - { __typename: "Friend", id: "3", name: "Leia" }, - ], + friendList: [{ __typename: "Friend", id: "1", name: "Luke" }], }), dataState: "streaming", networkStatus: NetworkStatus.streaming, @@ -1666,7 +1663,6 @@ test("can refetch and respond to cache updates after encountering an error in an friendList: [ { __typename: "Friend", id: "1", name: "Luke" }, { __typename: "Friend", id: "2", name: "Han" }, - { __typename: "Friend", id: "3", name: "Leia" }, ], }), dataState: "streaming", diff --git a/src/utilities/internal/DeepMerger.ts b/src/utilities/internal/DeepMerger.ts index ebbcce20660..91d28780387 100644 --- a/src/utilities/internal/DeepMerger.ts +++ b/src/utilities/internal/DeepMerger.ts @@ -19,12 +19,38 @@ const defaultReconciler: ReconcilerFunction = function ( }; /** @internal */ -export class DeepMerger { +export declare namespace DeepMerger { + export interface Options { + arrayMerge?: DeepMerger.ArrayMergeStrategy; + } + + export type ArrayMergeStrategy = + // Truncate the target array to the source length, then deep merge the array + // items at the same index + | "truncate" + // Combine arrays and deep merge array items for items at the same index. 
+ // This is the default + | "combine"; +} + +/** @internal */ +export class DeepMerger { constructor( - private reconciler: ReconcilerFunction = defaultReconciler as any as ReconcilerFunction + private reconciler: ReconcilerFunction = defaultReconciler as any as ReconcilerFunction, + private options: DeepMerger.Options = {} ) {} public merge(target: any, source: any, ...context: TContextArgs): any { + if ( + Array.isArray(target) && + Array.isArray(source) && + this.options.arrayMerge === "truncate" && + target.length > source.length + ) { + target = target.slice(0, source.length); + this.pastCopies.add(target); + } + if (isNonNullObject(source) && isNonNullObject(target)) { Object.keys(source).forEach((sourceKey) => { if (hasOwnProperty.call(target, sourceKey)) { diff --git a/src/utilities/internal/__tests__/DeepMerger.test.ts b/src/utilities/internal/__tests__/DeepMerger.test.ts index 88d6b05a2ba..01b0361ac83 100644 --- a/src/utilities/internal/__tests__/DeepMerger.test.ts +++ b/src/utilities/internal/__tests__/DeepMerger.test.ts @@ -94,3 +94,34 @@ test("provides optional context to reconciler function", function () { expect(typicalContextValues[0]).toBe(contextObject); expect(typicalContextValues[1]).toBe(contextObject); }); + +test("deep merges each array item keeping length by default", () => { + const target = [{ a: 1, b: { c: 2 } }, { e: 5 }]; + const source = [{ a: 2, b: { c: 2, d: 3 } }]; + + const result = new DeepMerger().merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }, { e: 5 }]); +}); + +test("deep merges each array item and truncates source to target length when using truncate arrayMerge", () => { + const target = [{ a: 1, b: { c: 2 } }, { e: 5 }]; + const source = [{ a: 2, b: { c: 2, d: 3 } }]; + + const result = new DeepMerger(undefined, { + arrayMerge: "truncate", + }).merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2, d: 3 } }]); +}); + +test("maintains source length when using truncate arrayMerge 
when source is longer than target length", () => { + const target = [{ a: 1, b: { c: 2 } }]; + const source = [{ a: 2 }, { e: 2 }]; + + const result = new DeepMerger(undefined, { + arrayMerge: "truncate", + }).merge(target, source); + + expect(result).toEqual([{ a: 2, b: { c: 2 } }, { e: 2 }]); +});