diff --git a/.vscode/settings.json b/.vscode/settings.json
index dddb0e3d487..c2580b79482 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -6,5 +6,5 @@
   "files.insertFinalNewline": true,
   "typescript.tsdk": "node_modules/typescript/lib",
   "cSpell.enableFiletypes": ["mdx"],
-  "jest.jestCommandLine": "node_modules/.bin/jest --config ./config/jest.config.js --ignoreProjects 'ReactDOM 17' --runInBand"
+  "jest.jestCommandLine": "node --expose-gc node_modules/.bin/jest --config ./config/jest.config.js --ignoreProjects 'ReactDOM 17' --runInBand"
 }
diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts
index fd49358b6d4..686d1c078ee 100644
--- a/src/__tests__/client.ts
+++ b/src/__tests__/client.ts
@@ -36,14 +36,8 @@ import {
 } from "../cache";
 
 import { ApolloError } from "../errors";
-import {
-  itAsync,
-  subscribeAndCount,
-  mockSingleLink,
-  MockLink,
-  wait,
-} from "../testing";
-import { spyOnConsole } from "../testing/internal";
+import { itAsync, mockSingleLink, MockLink, wait } from "../testing";
+import { ObservableStream, spyOnConsole } from "../testing/internal";
 import { waitFor } from "@testing-library/react";
 
 describe("client", () => {
@@ -78,19 +72,19 @@ describe("client", () => {
     });
 
     expect(() => {
-      client.query(
+      void client.query(
         gql`
           {
             a
           }
         ` as any
       );
-    }).toThrowError(
+    }).toThrow(
       "query option is required. You must specify your GraphQL document in the query option."
     );
     expect(() => {
-      client.query({ query: "{ a }" } as any);
-    }).toThrowError('You must wrap the query string in a "gql" tag.');
+      void client.query({ query: "{ a }" } as any);
+    }).toThrow('You must wrap the query string in a "gql" tag.');
   });
 
   it("should throw an error if mutation option is missing", async () => {
@@ -143,48 +137,44 @@ describe("client", () => {
     }
   );
 
-  itAsync(
-    "should allow a single query with an apollo-link enabled network interface",
-    (resolve, reject) => {
-      const query = gql`
-        query people {
-          allPeople(first: 1) {
-            people {
-              name
-              __typename
-            }
+  it("should allow a single query with an apollo-link enabled network interface", async () => {
+    const query = gql`
+      query people {
+        allPeople(first: 1) {
+          people {
+            name
             __typename
           }
+          __typename
         }
-      `;
+      }
+    `;
 
-      const data = {
-        allPeople: {
-          people: [
-            {
-              name: "Luke Skywalker",
-              __typename: "Person",
-            },
-          ],
-          __typename: "People",
-        },
-      };
+    const data = {
+      allPeople: {
+        people: [
+          {
+            name: "Luke Skywalker",
+            __typename: "Person",
+          },
+        ],
+        __typename: "People",
+      },
+    };
 
-      const variables = { first: 1 };
+    const variables = { first: 1 };
 
-      const link = ApolloLink.from([() => Observable.of({ data })]);
+    const link = ApolloLink.from([() => Observable.of({ data })]);
 
-      const client = new ApolloClient({
-        link,
-        cache: new InMemoryCache({ addTypename: false }),
-      });
+    const client = new ApolloClient({
+      link,
+      cache: new InMemoryCache({ addTypename: false }),
+    });
 
-      client.query({ query, variables }).then((actualResult) => {
-        expect(actualResult.data).toEqual(data);
-        resolve();
-      });
-    }
-  );
+    const actualResult = await client.query({ query, variables });
+
+    expect(actualResult.data).toEqual(data);
+  });
 
   itAsync(
     "should allow for a single query with complex default variables to take place",
     (resolve, reject) => {
@@ -1773,7 +1763,7 @@ describe("client", () => {
       cache: new InMemoryCache(),
     });
     expect(() => {
-      client.query({ query, returnPartialData: true } as QueryOptions);
+      void client.query({ query, returnPartialData: true } as QueryOptions);
     }).toThrowError(/returnPartialData/);
   });
 
@@ -1783,7 +1773,7
@@ describe("client", () => { cache: new InMemoryCache(), }); expect(() => { - client.query({ query, returnPartialData: true } as QueryOptions); + void client.query({ query, returnPartialData: true } as QueryOptions); }).toThrowError(/returnPartialData/); }); }); @@ -1910,11 +1900,11 @@ describe("client", () => { ); }); - itAsync("fetches from cache first, then network", (resolve, reject) => { + it("fetches from cache first, then network", async () => { const link = mockSingleLink({ request: { query }, result: { data: networkFetch }, - }).setOnError(reject); + }); const client = new ApolloClient({ link, @@ -1928,41 +1918,37 @@ describe("client", () => { fetchPolicy: "cache-and-network", }); - subscribeAndCount(reject, obs, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(initialData); - } else if (handleCount === 2) { - expect(result.data).toEqual(networkFetch); - resolve(); - } - }); + const stream = new ObservableStream(obs); + + await expect(stream).toEmitMatchedValue({ data: initialData }); + await expect(stream).toEmitMatchedValue({ data: networkFetch }); + + await expect(stream).not.toEmitAnything(); }); - itAsync( - "does not fail if cache entry is not present", - (resolve, reject) => { - const link = mockSingleLink({ - request: { query }, - result: { data: networkFetch }, - }).setOnError(reject); - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ addTypename: false }), - }); + it("does not fail if cache entry is not present", async () => { + const link = mockSingleLink({ + request: { query }, + result: { data: networkFetch }, + }); + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ addTypename: false }), + }); - const obs = client.watchQuery({ - query, - fetchPolicy: "cache-and-network", - }); + const obs = client.watchQuery({ + query, + fetchPolicy: "cache-and-network", + }); + const stream = new ObservableStream(obs); - subscribeAndCount(reject, obs, (handleCount, result) => { - expect(handleCount).toBe(1); - expect(result.data).toEqual(networkFetch); - expect(result.loading).toBe(false); - resolve(); - }); - } - ); + await expect(stream).toEmitMatchedValue({ + loading: false, + data: networkFetch, + }); + + await expect(stream).not.toEmitAnything(); + }); itAsync("fails if network request fails", (resolve, reject) => { const link = mockSingleLink(); // no queries = no replies. 
@@ -2031,86 +2017,63 @@ describe("client", () => { }); describe("standby queries", () => { - itAsync( - "are not watching the store or notifying on updates", - (resolve, reject) => { - const query = gql` - { - test - } - `; - const data = { test: "ok" }; - const data2 = { test: "not ok" }; + it("are not watching the store or notifying on updates", async () => { + const query = gql` + { + test + } + `; + const data = { test: "ok" }; + const data2 = { test: "not ok" }; - const link = mockSingleLink({ - request: { query }, - result: { data }, - }).setOnError(reject); + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); - const client = new ApolloClient({ link, cache: new InMemoryCache() }); + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + const obs = client.watchQuery({ query, fetchPolicy: "cache-first" }); + const stream = new ObservableStream(obs); - const obs = client.watchQuery({ query, fetchPolicy: "cache-first" }); + await expect(stream).toEmitMatchedValue({ data }); - let handleCalled = false; - subscribeAndCount(reject, obs, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - obs.setOptions({ query, fetchPolicy: "standby" }).then(() => { - client.writeQuery({ query, data: data2 }); - // this write should be completely ignored by the standby query - }); - setTimeout(() => { - if (!handleCalled) { - resolve(); - } - }, 20); - } - if (handleCount === 2) { - handleCalled = true; - reject(new Error("Handle should never be called on standby query")); - } - }); - } - ); + await obs.setOptions({ query, fetchPolicy: "standby" }); + // this write should be completely ignored by the standby query + client.writeQuery({ query, data: data2 }); - itAsync( - "return the current result when coming out of standby", - (resolve, reject) => { - const query = gql` - { - test - } - `; - const data = { test: "ok" }; - const data2 = { test: "not ok" }; + await expect(stream).not.toEmitAnything(); + }); - const link = mockSingleLink({ - request: { query }, - result: { data }, - }).setOnError(reject); + it("return the current result when coming out of standby", async () => { + const query = gql` + { + test + } + `; + const data = { test: "ok" }; + const data2 = { test: "not ok" }; - const client = new ApolloClient({ link, cache: new InMemoryCache() }); + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); - const obs = client.watchQuery({ query, fetchPolicy: "cache-first" }); + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + const obs = client.watchQuery({ query, fetchPolicy: "cache-first" }); + const stream = new ObservableStream(obs); - subscribeAndCount(reject, obs, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - obs.setOptions({ query, fetchPolicy: "standby" }).then(() => { - client.writeQuery({ query, data: data2 }); - // this write should be completely ignored by the standby query - setTimeout(() => { - obs.setOptions({ query, fetchPolicy: "cache-first" }); - }, 10); - }); - } - if (handleCount === 2) { - expect(result.data).toEqual(data2); - resolve(); - } - }); - } - ); + await expect(stream).toEmitMatchedValue({ data }); + + await obs.setOptions({ query, fetchPolicy: "standby" }); + // this write should be completely ignored by the standby query + client.writeQuery({ query, data: data2 }); + setTimeout(() => { + void obs.setOptions({ query, fetchPolicy: "cache-first" }); + }, 10); + + await 
expect(stream).toEmitMatchedValue({ data: data2 }); + await expect(stream).not.toEmitAnything(); + }); }); describe("network-only fetchPolicy", () => { @@ -3587,252 +3550,232 @@ describe("@connection", () => { }, }; - itAsync( - "allows setting default options for watchQuery", - (resolve, reject) => { - const link = mockSingleLink({ - request: { query }, - result: { data: networkFetch }, - }).setOnError(reject); - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ addTypename: false }), - defaultOptions: { - watchQuery: { - fetchPolicy: "cache-and-network", - }, + it("allows setting default options for watchQuery", async () => { + const link = mockSingleLink({ + request: { query }, + result: { data: networkFetch }, + }); + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ addTypename: false }), + defaultOptions: { + watchQuery: { + fetchPolicy: "cache-and-network", }, - }); - - client.writeQuery({ - query, - data: initialData, - }); + }, + }); - const obs = client.watchQuery({ - query, - // This undefined value should be ignored in favor of - // defaultOptions.watchQuery.fetchPolicy. - fetchPolicy: void 0, - }); + client.writeQuery({ + query, + data: initialData, + }); - subscribeAndCount(reject, obs, (handleCount, result) => { - const resultData = result.data; - if (handleCount === 1) { - expect(resultData).toEqual(initialData); - } else if (handleCount === 2) { - expect(resultData).toEqual(networkFetch); - resolve(); - } - }); - } - ); + const obs = client.watchQuery({ + query, + // This undefined value should be ignored in favor of + // defaultOptions.watchQuery.fetchPolicy. + fetchPolicy: void 0, + }); - itAsync( - "allows setting nextFetchPolicy in defaultOptions", - (resolve, reject) => { - let networkCounter = 0; - let nextFetchPolicyCallCount = 0; + const stream = new ObservableStream(obs); - const client = new ApolloClient({ - link: new ApolloLink( - (operation) => - new Observable((observer) => { - observer.next({ - data: { - count: networkCounter++, - }, - }); - observer.complete(); - }) - ), + await expect(stream).toEmitMatchedValue({ data: initialData }); + await expect(stream).toEmitMatchedValue({ data: networkFetch }); + await expect(stream).not.toEmitAnything(); + }); - cache: new InMemoryCache(), + it("allows setting nextFetchPolicy in defaultOptions", async () => { + let networkCounter = 0; + let nextFetchPolicyCallCount = 0; - defaultOptions: { - watchQuery: { - nextFetchPolicy(fetchPolicy, context) { - expect(++nextFetchPolicyCallCount).toBe(1); - expect(this.query).toBe(query); - expect(fetchPolicy).toBe("cache-first"); - - expect(context.reason).toBe("after-fetch"); - expect(context.observable).toBe(obs); - expect(context.options).toBe(obs.options); - expect(context.initialFetchPolicy).toBe("cache-first"); - - // Usually options.nextFetchPolicy applies only once, but a - // nextFetchPolicy function can set this.nextFetchPolicy - // again to perform an additional transition. 
- this.nextFetchPolicy = (fetchPolicy) => { - ++nextFetchPolicyCallCount; - return "cache-first"; - }; - - return "cache-and-network"; - }, + const client = new ApolloClient({ + link: new ApolloLink( + () => + new Observable((observer) => { + observer.next({ + data: { + count: networkCounter++, + }, + }); + observer.complete(); + }) + ), + cache: new InMemoryCache(), + defaultOptions: { + watchQuery: { + nextFetchPolicy(fetchPolicy, context) { + expect(++nextFetchPolicyCallCount).toBe(1); + expect(this.query).toBe(query); + expect(fetchPolicy).toBe("cache-first"); + + expect(context.reason).toBe("after-fetch"); + expect(context.observable).toBe(obs); + expect(context.options).toBe(obs.options); + expect(context.initialFetchPolicy).toBe("cache-first"); + + // Usually options.nextFetchPolicy applies only once, but a + // nextFetchPolicy function can set this.nextFetchPolicy + // again to perform an additional transition. + this.nextFetchPolicy = (fetchPolicy) => { + ++nextFetchPolicyCallCount; + return "cache-first"; + }; + + return "cache-and-network"; }, }, - }); + }, + }); - const query = gql` - query { - count - } - `; + const query = gql` + query { + count + } + `; - client.writeQuery({ - query, - data: { - count: "initial", - }, - }); + client.writeQuery({ + query, + data: { + count: "initial", + }, + }); - const obs = client.watchQuery({ query }); - - subscribeAndCount(reject, obs, (handleCount, result) => { - if (handleCount === 1) { - expect(nextFetchPolicyCallCount).toBe(1); - expect(result.data).toEqual({ count: "initial" }); - // Refetching makes a copy of the current options, which - // includes options.nextFetchPolicy, so the inner - // nextFetchPolicy function ends up getting called twice. - obs.refetch(); - } else if (handleCount === 2) { - expect(result.data).toEqual({ count: "initial" }); - expect(nextFetchPolicyCallCount).toBe(2); - } else if (handleCount === 3) { - expect(result.data).toEqual({ count: 0 }); - expect(nextFetchPolicyCallCount).toBe(2); - client.writeQuery({ - query, - data: { - count: "secondary", - }, - }); - } else if (handleCount === 4) { - expect(result.data).toEqual({ count: "secondary" }); - expect(nextFetchPolicyCallCount).toBe(3); - client.cache.evict({ fieldName: "count" }); - } else if (handleCount === 5) { - expect(result.data).toEqual({ count: 1 }); - expect(nextFetchPolicyCallCount).toBe(4); - expect(obs.options.fetchPolicy).toBe("cache-first"); - setTimeout(resolve, 50); - } else { - reject("too many results"); - } - }); - } - ); + const obs = client.watchQuery({ query }); + const stream = new ObservableStream(obs); - itAsync( - "can override global defaultOptions.watchQuery.nextFetchPolicy", - (resolve, reject) => { - let linkCount = 0; - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: new ApolloLink( - (request) => - new Observable((observer) => { - observer.next({ - data: { - linkCount: ++linkCount, - }, - }); - observer.complete(); - }) - ), - defaultOptions: { - watchQuery: { - nextFetchPolicy(currentFetchPolicy) { - reject( - new Error("should not have called global nextFetchPolicy") - ); - return currentFetchPolicy; - }, + await expect(stream).toEmitMatchedValue({ data: { count: "initial" } }); + expect(nextFetchPolicyCallCount).toBe(1); + + // Refetching makes a copy of the current options, which + // includes options.nextFetchPolicy, so the inner + // nextFetchPolicy function ends up getting called twice. 
+ void obs.refetch(); + + await expect(stream).toEmitMatchedValue({ data: { count: "initial" } }); + expect(nextFetchPolicyCallCount).toBe(2); + + await expect(stream).toEmitMatchedValue({ data: { count: 0 } }); + expect(nextFetchPolicyCallCount).toBe(2); + + client.writeQuery({ + query, + data: { + count: "secondary", + }, + }); + + await expect(stream).toEmitMatchedValue({ data: { count: "secondary" } }); + expect(nextFetchPolicyCallCount).toBe(3); + + client.cache.evict({ fieldName: "count" }); + + await expect(stream).toEmitMatchedValue({ data: { count: 1 } }); + expect(nextFetchPolicyCallCount).toBe(4); + expect(obs.options.fetchPolicy).toBe("cache-first"); + + await expect(stream).not.toEmitAnything(); + }); + + it("can override global defaultOptions.watchQuery.nextFetchPolicy", async () => { + let linkCount = 0; + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink( + () => + new Observable((observer) => { + observer.next({ + data: { + linkCount: ++linkCount, + }, + }); + observer.complete(); + }) + ), + defaultOptions: { + watchQuery: { + nextFetchPolicy() { + throw new Error("should not have called global nextFetchPolicy"); }, }, - }); + }, + }); - const query: TypedDocumentNode<{ - linkCount: number; - }> = gql` - query CountQuery { - linkCount - } - `; + const query: TypedDocumentNode<{ + linkCount: number; + }> = gql` + query CountQuery { + linkCount + } + `; - let fetchPolicyRecord: WatchQueryFetchPolicy[] = []; - const observable = client.watchQuery({ - query, - nextFetchPolicy(currentFetchPolicy) { - fetchPolicyRecord.push(currentFetchPolicy); - return "cache-first"; - }, - }); + let fetchPolicyRecord: WatchQueryFetchPolicy[] = []; + const observable = client.watchQuery({ + query, + nextFetchPolicy(currentFetchPolicy) { + fetchPolicyRecord.push(currentFetchPolicy); + return "cache-first"; + }, + }); - subscribeAndCount(reject, observable, (resultCount, result) => { - if (resultCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ linkCount: 1 }); - expect(fetchPolicyRecord).toEqual(["cache-first"]); - - return client - .refetchQueries({ - include: ["CountQuery"], - }) - .then((results) => { - expect(results.length).toBe(1); - results.forEach((result) => { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ linkCount: 2 }); - }); - expect(fetchPolicyRecord).toEqual([ - "cache-first", - "network-only", - ]); - }); - } else if (resultCount === 2) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ linkCount: 2 }); - expect(fetchPolicyRecord).toEqual(["cache-first", "network-only"]); - - return observable - .reobserve({ - // Allow delivery of loading:true result. - notifyOnNetworkStatusChange: true, - // Force a network request in addition to loading:true cache result. 
- fetchPolicy: "cache-and-network", - }) - .then((finalResult) => { - expect(finalResult.loading).toBe(false); - expect(finalResult.data).toEqual({ linkCount: 3 }); - expect(fetchPolicyRecord).toEqual([ - "cache-first", - "network-only", - "cache-and-network", - ]); - }); - } else if (resultCount === 3) { - expect(result.loading).toBe(true); - expect(result.data).toEqual({ linkCount: 2 }); - } else if (resultCount === 4) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ linkCount: 3 }); - expect(fetchPolicyRecord).toEqual([ - "cache-first", - "network-only", - "cache-and-network", - ]); - - setTimeout(resolve, 10); - } else { - reject(new Error(`Too many results (${resultCount})`)); - } - }); - } - ); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { linkCount: 1 }, + }); + expect(fetchPolicyRecord).toEqual(["cache-first"]); + + const results = await client.refetchQueries({ + include: ["CountQuery"], + }); + + expect(results).toHaveLength(1); + expect(results[0]).toMatchObject({ + loading: false, + data: { linkCount: 2 }, + }); + + expect(fetchPolicyRecord).toEqual(["cache-first", "network-only"]); + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { linkCount: 2 }, + }); + expect(fetchPolicyRecord).toEqual(["cache-first", "network-only"]); + + const finalResult = await observable.reobserve({ + // Allow delivery of loading:true result. + notifyOnNetworkStatusChange: true, + // Force a network request in addition to loading:true cache result. + fetchPolicy: "cache-and-network", + }); + + expect(finalResult.loading).toBe(false); + expect(finalResult.data).toEqual({ linkCount: 3 }); + expect(fetchPolicyRecord).toEqual([ + "cache-first", + "network-only", + "cache-and-network", + ]); + + await expect(stream).toEmitMatchedValue({ + loading: true, + data: { linkCount: 2 }, + }); + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { linkCount: 3 }, + }); + + expect(fetchPolicyRecord).toEqual([ + "cache-first", + "network-only", + "cache-and-network", + ]); + + await expect(stream).not.toEmitAnything(); + }); itAsync("allows setting default options for query", (resolve, reject) => { const errors = [{ message: "failure", name: "failure" }]; diff --git a/src/__tests__/fetchMore.ts b/src/__tests__/fetchMore.ts index 00469ad8933..9d53d363789 100644 --- a/src/__tests__/fetchMore.ts +++ b/src/__tests__/fetchMore.ts @@ -5,7 +5,6 @@ import { ApolloClient, ApolloLink, NetworkStatus, - ObservableQuery, TypedDocumentNode, } from "../core"; @@ -22,7 +21,8 @@ import { FieldMergeFunction, } from "../cache"; -import { itAsync, mockSingleLink, subscribeAndCount } from "../testing"; +import { MockedResponse, mockSingleLink } from "../testing"; +import { ObservableStream } from "../testing/internal"; describe("updateQuery on a simple query", () => { const query = gql` @@ -44,41 +44,33 @@ describe("updateQuery on a simple query", () => { }, }; - itAsync("triggers new result from updateQuery", (resolve, reject) => { - let latestResult: any = null; + it("triggers new result from updateQuery", async () => { const link = mockSingleLink({ request: { query }, result, - }).setOnError(reject); + }); const client = new ApolloClient({ link, cache: new InMemoryCache(), }); - const obsHandle = client.watchQuery({ - query, + const observable = client.watchQuery({ query }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitMatchedValue({ + data: { entry: { value: 
1 } }, }); - const sub = obsHandle.subscribe({ - next(queryResult: any) { - // do nothing - latestResult = queryResult; - }, + + observable.updateQuery((prevResult: any) => { + const res = cloneDeep(prevResult); + res.entry.value = 2; + return res; }); - return new Promise((resolve) => setTimeout(resolve, 5)) - .then(() => obsHandle) - .then((watchedQuery: ObservableQuery) => { - expect(latestResult.data.entry.value).toBe(1); - watchedQuery.updateQuery((prevResult: any) => { - const res = cloneDeep(prevResult); - res.entry.value = 2; - return res; - }); - }) - .then(() => expect(latestResult.data.entry.value).toBe(2)) - .then(() => sub.unsubscribe()) - .then(resolve, reject); + await expect(stream).toEmitMatchedValue({ + data: { entry: { value: 2 } }, + }); }); }); @@ -107,45 +99,40 @@ describe("updateQuery on a query with required and optional variables", () => { }, }; - itAsync("triggers new result from updateQuery", (resolve, reject) => { - let latestResult: any = null; + it("triggers new result from updateQuery", async () => { const link = mockSingleLink({ request: { query, variables, }, result, - }).setOnError(reject); + }); const client = new ApolloClient({ link, cache: new InMemoryCache(), }); - const obsHandle = client.watchQuery({ + const observable = client.watchQuery({ query, variables, }); - const sub = obsHandle.subscribe({ - next(queryResult: any) { - // do nothing - latestResult = queryResult; - }, + + const stream = new ObservableStream(observable); + + await expect(stream).toEmitMatchedValue({ + data: { entry: { value: 1 } }, }); - return new Promise((resolve) => setTimeout(resolve, 5)) - .then(() => obsHandle) - .then((watchedQuery: ObservableQuery) => { - expect(latestResult.data.entry.value).toBe(1); - watchedQuery.updateQuery((prevResult: any) => { - const res = cloneDeep(prevResult); - res.entry.value = 2; - return res; - }); - }) - .then(() => expect(latestResult.data.entry.value).toBe(2)) - .then(() => sub.unsubscribe()) - .then(resolve, reject); + observable.updateQuery((prevResult: any) => { + const res = cloneDeep(prevResult); + res.entry.value = 2; + return res; + }); + + await expect(stream).toEmitMatchedValue({ + data: { entry: { value: 2 } }, + }); }); }); @@ -233,7 +220,7 @@ describe("fetchMore on an observable query", () => { }); } - function setup(reject: (reason: any) => any, ...mockedResponses: any[]) { + function setup(...mockedResponses: MockedResponse[]) { const link = mockSingleLink( { request: { @@ -243,7 +230,7 @@ describe("fetchMore on an observable query", () => { result, }, ...mockedResponses - ).setOnError(reject); + ); const client = new ApolloClient({ link, @@ -267,7 +254,6 @@ describe("fetchMore on an observable query", () => { } function setupWithCacheConfig( - reject: (reason: any) => any, cacheConfig: InMemoryCacheConfig, ...mockedResponses: any[] ) { @@ -281,7 +267,7 @@ describe("fetchMore on an observable query", () => { result, }, ...mockedResponses - ).setOnError(reject), + ), cache: new InMemoryCache(cacheConfig), }); @@ -292,8 +278,8 @@ describe("fetchMore on an observable query", () => { } describe("triggers new result with async new variables", () => { - itAsync("updateQuery", (resolve, reject) => { - const observable = setup(reject, { + it("updateQuery", async () => { + const observable = setup({ request: { query, variables: variablesMore, @@ -301,48 +287,52 @@ describe("fetchMore on an observable query", () => { result: resultMore, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - 
expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); - - return observable - .fetchMore({ - // Rely on the fact that the original variables had limit: 10 - variables: { start: 10 }, - updateQuery: (prev, options) => { - expect(options.variables).toEqual(variablesMore); - - const state = cloneDeep(prev) as any; - state.entry.comments = [ - ...state.entry.comments, - ...options.fetchMoreResult.entry.comments, - ]; - return state; - }, - }) - .then((fetchMoreResult) => { - // This is the server result - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.data.entry.comments).toHaveLength(10); - }); - } else if (count === 2) { - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - for (let i = 1; i <= 20; i++) { - expect(combinedComments[i - 1].text).toEqual(`comment ${i}`); - } - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + // Rely on the fact that the original variables had limit: 10 + variables: { start: 10 }, + updateQuery: (prev, options) => { + expect(options.variables).toEqual(variablesMore); + + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...options.fetchMoreResult.entry.comments, + ]; + return state; + }, + }); + + // This is the server result + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreResult.data.entry.comments).toHaveLength(10); + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; + + expect(combinedComments).toHaveLength(20); + + for (let i = 1; i <= 20; i++) { + expect(combinedComments[i - 1].text).toEqual(`comment ${i}`); } - }); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync("field policy", (resolve, reject) => { + it("field policy", async () => { const observable = setupWithCacheConfig( - reject, { typePolicies: { Entry: { @@ -358,41 +348,45 @@ describe("fetchMore on an observable query", () => { } ); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); + const stream = new ObservableStream(observable); - return observable - .fetchMore({ - // Rely on the fact that the original variables had limit: 10 - variables: { start: 10 }, - }) - .then((fetchMoreResult) => { - // This is the server result - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.data.entry.comments).toHaveLength(10); - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - combinedComments.forEach((comment, i) => { - expect(comment.text).toEqual(`comment ${i + 1}`); - }); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + // Rely on the fact that the original variables had limit: 10 + variables: { start: 10 }, + 
}); + + // This is the server result + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreResult.data.entry.comments).toHaveLength(10); + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; + + expect(result.loading).toBe(false); + expect(combinedComments).toHaveLength(20); + + combinedComments.forEach((comment, i) => { + expect(comment.text).toEqual(`comment ${i + 1}`); + }); + } + + await expect(stream).not.toEmitAnything(); }); }); describe("basic fetchMore results merging", () => { - itAsync("updateQuery", (resolve, reject) => { - const observable = setup(reject, { + it("updateQuery", async () => { + const observable = setup({ request: { query, variables: variablesMore, @@ -400,51 +394,55 @@ describe("fetchMore on an observable query", () => { result: resultMore, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); - - return observable - .fetchMore({ - variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 - updateQuery: (prev, options) => { - expect(options.variables).toEqual(variablesMore); - const state = cloneDeep(prev) as any; - state.entry.comments = [ - ...state.entry.comments, - ...options.fetchMoreResult.entry.comments, - ]; - return state; - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult.loading).toBe(false); - const fetchMoreComments = fetchMoreResult.data.entry.comments; - expect(fetchMoreComments).toHaveLength(10); - fetchMoreComments.forEach((comment, i) => { - expect(comment.text).toEqual(`comment ${i + 11}`); - }); - }); - } else if (count === 2) { - expect(result.loading).toBe(false); - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - - combinedComments.forEach((comment, i) => { - expect(comment.text).toEqual(`comment ${i + 1}`); - }); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 + updateQuery: (prev, options) => { + expect(options.variables).toEqual(variablesMore); + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...options.fetchMoreResult.entry.comments, + ]; + return state; + }, + }); + + const fetchMoreComments = fetchMoreResult.data.entry.comments; + + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreComments).toHaveLength(10); + fetchMoreComments.forEach((comment, i) => { + expect(comment.text).toEqual(`comment ${i + 11}`); + }); + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; + + expect(result.loading).toBe(false); + expect(combinedComments).toHaveLength(20); + + combinedComments.forEach((comment, i) => { + expect(comment.text).toEqual(`comment ${i + 1}`); + }); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync("field policy", (resolve, reject) => { + it("field policy", async () => { const observable = setupWithCacheConfig( - reject, { typePolicies: { Entry: { @@ -463,34 +461,38 @@ describe("fetchMore on an observable query", 
() => { } ); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); + const stream = new ObservableStream(observable); - return observable - .fetchMore({ - // rely on the fact that the original variables had limit: 10 - variables: { start: 10 }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.data.entry.comments).toHaveLength(10); // this is the server result - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - combinedComments.forEach((comment, i) => { - expect(comment.text).toEqual(`comment ${i + 1}`); - }); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + // rely on the fact that the original variables had limit: 10 + variables: { start: 10 }, + }); + + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreResult.data.entry.comments).toHaveLength(10); // this is the server result + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; + + expect(result.loading).toBe(false); + expect(combinedComments).toHaveLength(20); + + combinedComments.forEach((comment, i) => { + expect(comment.text).toEqual(`comment ${i + 1}`); + }); + } + + await expect(stream).not.toEmitAnything(); }); }); @@ -594,7 +596,7 @@ describe("fetchMore on an observable query", () => { }); } - itAsync("cache-and-network", (resolve, reject) => { + it("cache-and-network", async () => { const { client, linkRequests } = makeClient(); const observable = client.watchQuery({ @@ -608,218 +610,92 @@ describe("fetchMore on an observable query", () => { expect(linkRequests.length).toBe(0); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 2), - }, - }); + const stream = new ObservableStream(observable); - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - ]); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 2), + }, + }); - observable - .fetchMore({ - variables: { - offset: 2, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(2, 4), - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 4), - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - ]); - - return observable - .fetchMore({ - variables: { - offset: 5, - limit: 3, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(5, 8), - }, - }); - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: false, - networkStatus: 
NetworkStatus.ready, - data: { - TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], - }, - }); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + ]); - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - { operationName: "GetTODOs", offset: 5, limit: 3 }, - ]); + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 2, + }, + }); - checkCacheExtract1234678(client.cache); + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(2, 4), + }, + }); + } - // Wait 20ms to allow unexpected results to be delivered, failing in - // the else block below. - setTimeout(resolve, 20); - } else { - reject(`too many results (${count})`); - } + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 4), + }, }); - }); - itAsync( - "cache-and-network with notifyOnNetworkStatusChange: true", - (resolve, reject) => { - const { client, linkRequests } = makeClient(); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + ]); - const observable = client.watchQuery({ - query, - fetchPolicy: "cache-and-network", - notifyOnNetworkStatusChange: true, + { + const fetchMoreResult = await observable.fetchMore({ variables: { - offset: 0, - limit: 2, + offset: 5, + limit: 3, }, }); - expect(linkRequests.length).toBe(0); + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(5, 8), + }, + }); + } - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 2), - }, - }); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], + }, + }); - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - ]); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + { operationName: "GetTODOs", offset: 5, limit: 3 }, + ]); - observable - .fetchMore({ - variables: { - offset: 2, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(2, 4), - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.fetchMore, - data: { - TODO: tasks.slice(0, 2), - }, - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 4), - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - ]); - - return observable - .fetchMore({ - variables: { - offset: 5, - limit: 3, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(5, 8), - }, - }); - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.fetchMore, - data: { - TODO: tasks.slice(0, 4), - }, 
- }); - } else if (count === 5) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - { operationName: "GetTODOs", offset: 5, limit: 3 }, - ]); - - checkCacheExtract1234678(client.cache); - - // Wait 20ms to allow unexpected results to be delivered, failing in - // the else block below. - setTimeout(resolve, 20); - } else { - reject(`too many results (${count})`); - } - }); - } - ); + checkCacheExtract1234678(client.cache); - itAsync("network-only", (resolve, reject) => { + await expect(stream).not.toEmitAnything(); + }); + + it("cache-and-network with notifyOnNetworkStatusChange: true", async () => { const { client, linkRequests } = makeClient(); const observable = client.watchQuery({ query, - fetchPolicy: "network-only", + fetchPolicy: "cache-and-network", + notifyOnNetworkStatusChange: true, variables: { offset: 0, limit: 2, @@ -828,390 +704,470 @@ describe("fetchMore on an observable query", () => { expect(linkRequests.length).toBe(0); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 2), - }, - }); + const stream = new ObservableStream(observable); - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - ]); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 2), + }, + }); - observable - .fetchMore({ - variables: { - offset: 2, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(2, 4), - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 4), - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - ]); - - return observable - .fetchMore({ - variables: { - offset: 5, - limit: 3, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(5, 8), - }, - }); - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], - }, - }); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + ]); - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - { operationName: "GetTODOs", offset: 5, limit: 3 }, - ]); + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 2, + }, + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(2, 4), + }, + }); + } - checkCacheExtract1234678(client.cache); + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.fetchMore, + data: { + TODO: tasks.slice(0, 2), + }, + }); - // Wait 20ms to allow unexpected results to be delivered, failing in - // the else block below. 
- setTimeout(resolve, 20); - } else { - reject(`too many results (${count})`); - } + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 4), + }, }); - }); - itAsync( - "network-only with notifyOnNetworkStatusChange: true", - (resolve, reject) => { - const { client, linkRequests } = makeClient(); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + ]); - const observable = client.watchQuery({ - query, - fetchPolicy: "network-only", - notifyOnNetworkStatusChange: true, + { + const fetchMoreResult = await observable.fetchMore({ variables: { - offset: 0, - limit: 2, + offset: 5, + limit: 3, }, }); - expect(linkRequests.length).toBe(0); - - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 2), - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - ]); - - observable - .fetchMore({ - variables: { - offset: 2, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(2, 4), - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.fetchMore, - data: { - TODO: tasks.slice(0, 2), - }, - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(0, 4), - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - ]); - - return observable - .fetchMore({ - variables: { - offset: 5, - limit: 3, - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: tasks.slice(5, 8), - }, - }); - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.fetchMore, - data: { - TODO: tasks.slice(0, 4), - }, - }); - } else if (count === 5) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], - }, - }); - - expect(linkRequests).toEqual([ - { operationName: "GetTODOs", offset: 0, limit: 2 }, - { operationName: "GetTODOs", offset: 2, limit: 2 }, - { operationName: "GetTODOs", offset: 5, limit: 3 }, - ]); - - checkCacheExtract1234678(client.cache); - - // Wait 20ms to allow unexpected results to be delivered, failing in - // the else block below. 
- setTimeout(resolve, 20); - } else { - reject(`too many results (${count})`); - } + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(5, 8), + }, }); } - ); - // itAsync("no-cache", (resolve, reject) => { - // const client = makeClient(); - // resolve(); - // }); - }); - - itAsync( - "fetchMore passes new args to field merge function", - (resolve, reject) => { - const mergeArgsHistory: (Record | null)[] = []; - const groceriesFieldPolicy = offsetLimitPagination(); - const { merge } = groceriesFieldPolicy; - groceriesFieldPolicy.merge = function (existing, incoming, options) { - mergeArgsHistory.push(options.args); - return (merge as FieldMergeFunction).call( - this, - existing, - incoming, - options - ); - }; + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.fetchMore, + data: { + TODO: tasks.slice(0, 4), + }, + }); - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - groceries: groceriesFieldPolicy, - }, - }, + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], }, }); - const query = gql` - query GroceryList($offset: Int!, $limit: Int!) { - groceries(offset: $offset, limit: $limit) { - id - item - found - } - } - `; + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + { operationName: "GetTODOs", offset: 5, limit: 3 }, + ]); - const initialVars = { - offset: 0, - limit: 2, - }; + checkCacheExtract1234678(client.cache); - const initialGroceries = [ - { - __typename: "GroceryItem", - id: 1, - item: "organic whole milk", - found: false, - }, - { - __typename: "GroceryItem", - id: 2, - item: "beer that we both like", - found: false, - }, - ]; + await expect(stream).not.toEmitAnything(); + }); - const additionalVars = { - offset: 2, - limit: 3, - }; + it("network-only", async () => { + const { client, linkRequests } = makeClient(); - const additionalGroceries = [ - { - __typename: "GroceryItem", - id: 3, - item: "gluten-free pasta", - found: false, - }, - { - __typename: "GroceryItem", - id: 4, - item: "goat cheese", - found: false, - }, - { - __typename: "GroceryItem", - id: 5, - item: "paper towels", - found: false, + const observable = client.watchQuery({ + query, + fetchPolicy: "network-only", + variables: { + offset: 0, + limit: 2, }, - ]; + }); - const finalGroceries = [...initialGroceries, ...additionalGroceries]; + expect(linkRequests.length).toBe(0); - const client = new ApolloClient({ - cache, - link: mockSingleLink( - { - request: { - query, - variables: initialVars, - }, - result: { - data: { - groceries: initialGroceries, - }, - }, + const stream = new ObservableStream(observable); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 2), + }, + }); + + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + ]); + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 2, }, - { - request: { - query, - variables: additionalVars, - }, - result: { - data: { - groceries: additionalGroceries, - }, - }, - } - ).setOnError(reject), + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(2, 4), + }, + }); + } + + await expect(stream).toEmitValue({ + loading: false, 
+ networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 4), + }, }); + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + ]); + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 5, + limit: 3, + }, + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(5, 8), + }, + }); + } + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], + }, + }); + + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + { operationName: "GetTODOs", offset: 5, limit: 3 }, + ]); + checkCacheExtract1234678(client.cache); + + await expect(stream).not.toEmitAnything(); + }); + + it("network-only with notifyOnNetworkStatusChange: true", async () => { + const { client, linkRequests } = makeClient(); + const observable = client.watchQuery({ query, - variables: initialVars, + fetchPolicy: "network-only", + notifyOnNetworkStatusChange: true, + variables: { + offset: 0, + limit: 2, + }, + }); + + expect(linkRequests.length).toBe(0); + + const stream = new ObservableStream(observable); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 2), + }, + }); + + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + ]); + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 2, + }, + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(2, 4), + }, + }); + } + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.fetchMore, + data: { + TODO: tasks.slice(0, 2), + }, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(0, 4), + }, + }); + + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + ]); + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 5, + limit: 3, + }, + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: tasks.slice(5, 8), + }, + }); + } + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.fetchMore, + data: { + TODO: tasks.slice(0, 4), + }, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + TODO: [...tasks.slice(0, 4), ...tasks.slice(5, 8)], + }, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, + expect(linkRequests).toEqual([ + { operationName: "GetTODOs", offset: 0, limit: 2 }, + { operationName: "GetTODOs", offset: 2, limit: 2 }, + { operationName: "GetTODOs", offset: 5, limit: 3 }, + ]); + checkCacheExtract1234678(client.cache); + + await expect(stream).not.toEmitAnything(); + }); + }); + + it("fetchMore passes new args to field merge function", async () => { + const mergeArgsHistory: (Record | null)[] = []; + const groceriesFieldPolicy = offsetLimitPagination(); + const { merge } = groceriesFieldPolicy; + 
groceriesFieldPolicy.merge = function (existing, incoming, options) { + mergeArgsHistory.push(options.args); + return (merge as FieldMergeFunction).call( + this, + existing, + incoming, + options + ); + }; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + groceries: groceriesFieldPolicy, + }, + }, + }, + }); + + const query = gql` + query GroceryList($offset: Int!, $limit: Int!) { + groceries(offset: $offset, limit: $limit) { + id + item + found + } + } + `; + + const initialVars = { + offset: 0, + limit: 2, + }; + + const initialGroceries = [ + { + __typename: "GroceryItem", + id: 1, + item: "organic whole milk", + found: false, + }, + { + __typename: "GroceryItem", + id: 2, + item: "beer that we both like", + found: false, + }, + ]; + + const additionalVars = { + offset: 2, + limit: 3, + }; + + const additionalGroceries = [ + { + __typename: "GroceryItem", + id: 3, + item: "gluten-free pasta", + found: false, + }, + { + __typename: "GroceryItem", + id: 4, + item: "goat cheese", + found: false, + }, + { + __typename: "GroceryItem", + id: 5, + item: "paper towels", + found: false, + }, + ]; + + const finalGroceries = [...initialGroceries, ...additionalGroceries]; + + const client = new ApolloClient({ + cache, + link: mockSingleLink( + { + request: { + query, + variables: initialVars, + }, + result: { data: { groceries: initialGroceries, }, - }); + }, + }, + { + request: { + query, + variables: additionalVars, + }, + result: { + data: { + groceries: additionalGroceries, + }, + }, + } + ), + }); - expect(mergeArgsHistory).toEqual([{ offset: 0, limit: 2 }]); + const observable = client.watchQuery({ + query, + variables: initialVars, + }); - observable - .fetchMore({ - variables: { - offset: 2, - limit: 3, - }, - }) - .then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - groceries: additionalGroceries, - }, - }); + const stream = new ObservableStream(observable); - expect(observable.options.fetchPolicy).toBe("cache-first"); - }); - } else if (count === 2) { - // This result comes entirely from the cache, without updating the - // original variables for the ObservableQuery, because the - // offsetLimitPagination field policy has keyArgs:false. - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - groceries: finalGroceries, - }, - }); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + groceries: initialGroceries, + }, + }); - expect(mergeArgsHistory).toEqual([ - { offset: 0, limit: 2 }, - { offset: 2, limit: 3 }, - ]); + expect(mergeArgsHistory).toEqual([{ offset: 0, limit: 2 }]); - resolve(); - } + { + const fetchMoreResult = await observable.fetchMore({ + variables: { + offset: 2, + limit: 3, + }, }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + groceries: additionalGroceries, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync("fetching more with a different query", (resolve, reject) => { - const observable = setup(reject, { + // This result comes entirely from the cache, without updating the + // original variables for the ObservableQuery, because the + // offsetLimitPagination field policy has keyArgs:false. 
+ await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + groceries: finalGroceries, + }, + }); + + expect(mergeArgsHistory).toEqual([ + { offset: 0, limit: 2 }, + { offset: 2, limit: 3 }, + ]); + + await expect(stream).not.toEmitAnything(); + }); + + it("fetching more with a different query", async () => { + const observable = setup({ request: { query: query2, variables: variables2, @@ -1219,49 +1175,53 @@ describe("fetchMore on an observable query", () => { result: result2, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); + const stream = new ObservableStream(observable); - return observable - .fetchMore({ - query: query2, - variables: variables2, - updateQuery: (prev, options) => { - const state = cloneDeep(prev) as any; - state.entry.comments = [ - ...state.entry.comments, - ...options.fetchMoreResult.comments, - ]; - return state; - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.data.comments).toHaveLength(10); - }); - } else if (count === 2) { - expect(result.loading).toBe(false); - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); + { + const result = await stream.takeNext(); - for (let i = 1; i <= 10; i++) { - expect(combinedComments[i - 1].text).toEqual(`comment ${i}`); - } - for (let i = 11; i <= 20; i++) { - expect(combinedComments[i - 1].text).toEqual(`new comment ${i}`); - } + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + query: query2, + variables: variables2, + updateQuery: (prev, options) => { + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...options.fetchMoreResult.comments, + ]; + return state; + }, + }); + + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreResult.data.comments).toHaveLength(10); + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); + expect(result.loading).toBe(false); + expect(combinedComments).toHaveLength(20); + + for (let i = 1; i <= 10; i++) { + expect(combinedComments[i - 1].text).toEqual(`comment ${i}`); } - }); + for (let i = 11; i <= 20; i++) { + expect(combinedComments[i - 1].text).toEqual(`new comment ${i}`); + } + } + + await expect(stream).not.toEmitAnything(); }); describe("will not get an error from `fetchMore` if thrown", () => { - itAsync("updateQuery", (resolve, reject) => { + it("updateQuery", async () => { const fetchMoreError = new Error("Uh, oh!"); const link = mockSingleLink( { @@ -1287,38 +1247,26 @@ describe("fetchMore on an observable query", () => { notifyOnNetworkStatusChange: true, }); - let count = 0; - observable.subscribe({ - next: ({ data, networkStatus }) => { - switch (++count) { - case 1: - expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(10); - observable - .fetchMore({ - variables: { start: 10 }, - updateQuery: (prev) => { - reject(new Error("should not have called updateQuery")); - return prev; - }, - }) - .catch((e) => { - expect(e.networkError).toBe(fetchMoreError); - resolve(); - }); - break; - } - }, - error: () => { - reject(new Error("`error` 
called when it wasn’t supposed to be.")); - }, - complete: () => { - reject(new Error("`complete` called when it wasn’t supposed to be.")); - }, - }); + const stream = new ObservableStream(observable); + + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect(data.entry.comments.length).toBe(10); + + const error = await observable + .fetchMore({ + variables: { start: 10 }, + updateQuery: () => { + throw new Error("should not have called updateQuery"); + }, + }) + .catch((error) => error); + + expect(error.networkError).toBe(fetchMoreError); }); - itAsync("field policy", (resolve, reject) => { + it("field policy", async () => { const fetchMoreError = new Error("Uh, oh!"); const link = mockSingleLink( { @@ -1345,7 +1293,7 @@ describe("fetchMore on an observable query", () => { keyArgs: false, merge(_, incoming) { if (calledFetchMore) { - reject(new Error("should not have called merge")); + throw new Error("should not have called merge"); } return incoming; }, @@ -1362,37 +1310,25 @@ describe("fetchMore on an observable query", () => { notifyOnNetworkStatusChange: true, }); - let count = 0; - observable.subscribe({ - next: ({ data, networkStatus }) => { - switch (++count) { - case 1: - expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(10); - calledFetchMore = true; - observable - .fetchMore({ - variables: { start: 10 }, - }) - .catch((e) => { - expect(e.networkError).toBe(fetchMoreError); - resolve(); - }); - break; - } - }, - error: () => { - reject(new Error("`error` called when it wasn’t supposed to be.")); - }, - complete: () => { - reject(new Error("`complete` called when it wasn’t supposed to be.")); - }, - }); + const stream = new ObservableStream(observable); + + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect(data.entry.comments.length).toBe(10); + + const error = await observable + .fetchMore({ + variables: { start: 10 }, + }) + .catch((error) => error); + + expect(error.networkError).toBe(fetchMoreError); }); }); - itAsync("will not leak fetchMore query", (resolve, reject) => { - const observable = setup(reject, { + it("will not leak fetchMore query", async () => { + const observable = setup({ request: { query, variables: variablesMore, @@ -1406,95 +1342,93 @@ describe("fetchMore on an observable query", () => { const beforeQueryCount = count(); - observable - .fetchMore({ - variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 - }) - .then(() => { - expect(count()).toBe(beforeQueryCount); - }) - .then(resolve, reject); + await observable.fetchMore({ + variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 + }); + + expect(count()).toBe(beforeQueryCount); }); - itAsync( - "delivers all loading states even if data unchanged", - (resolve, reject) => { - type TEmptyItems = { - emptyItems: Array<{ - text: string; - }>; - }; + it("delivers all loading states even if data unchanged", async () => { + type TEmptyItems = { + emptyItems: Array<{ + text: string; + }>; + }; - const query: TypedDocumentNode = gql` - query GetNothing { - emptyItems { - text - } + const query: TypedDocumentNode = gql` + query GetNothing { + emptyItems { + text } - `; + } + `; - const variables = {}; + const variables = {}; - const emptyItemsMock = { - request: { - query, - variables, - }, - result: { - data: { - emptyItems: [], - }, + const emptyItemsMock = { + 
request: { + query, + variables, + }, + result: { + data: { + emptyItems: [], }, - }; + }, + }; - const link = mockSingleLink( - emptyItemsMock, - emptyItemsMock, - emptyItemsMock - ).setOnError(reject); + const link = mockSingleLink(emptyItemsMock, emptyItemsMock, emptyItemsMock); - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - }); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); - const observable = client.watchQuery({ - query, - variables, - notifyOnNetworkStatusChange: true, - }); + const observable = client.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: true, + }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.data.emptyItems).toHaveLength(0); + const stream = new ObservableStream(observable); - return observable - .fetchMore({ - variables, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.networkStatus).toBe(NetworkStatus.ready); - expect(fetchMoreResult.data.emptyItems).toHaveLength(0); - }); - } else if (count === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.fetchMore); - expect(result.data.emptyItems).toHaveLength(0); - } else if (count === 3) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.data.emptyItems).toHaveLength(0); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); - } - ); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + emptyItems: [], + }, + }); + + const fetchMoreResult = await observable.fetchMore({ + variables, + }); + + expect(fetchMoreResult).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { emptyItems: [] }, + }); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.fetchMore, + data: { + emptyItems: [], + }, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + emptyItems: [], + }, + }); + + await expect(stream).not.toEmitAnything(); + }); }); describe("fetchMore on an observable query with connection", () => { @@ -1567,7 +1501,7 @@ describe("fetchMore on an observable query with connection", () => { }); } - function setup(reject: (reason: any) => any, ...mockedResponses: any[]) { + function setup(...mockedResponses: MockedResponse[]) { const link = mockSingleLink( { request: { @@ -1577,7 +1511,7 @@ describe("fetchMore on an observable query with connection", () => { result, }, ...mockedResponses - ).setOnError(reject); + ); const client = new ApolloClient({ link, @@ -1601,7 +1535,6 @@ describe("fetchMore on an observable query with connection", () => { } function setupWithCacheConfig( - reject: (reason: any) => any, cacheConfig: InMemoryCacheConfig, ...mockedResponses: any[] ) { @@ -1615,7 +1548,7 @@ describe("fetchMore on an observable query with connection", () => { result, }, ...mockedResponses - ).setOnError(reject), + ), cache: new InMemoryCache(cacheConfig), }); @@ -1626,8 +1559,8 @@ describe("fetchMore on an observable query with connection", () => { } describe("fetchMore with connection results merging", () => { - itAsync("updateQuery", (resolve, reject) => { - const observable = setup(reject, { + it("updateQuery", async () 
=> { + const observable = setup({ request: { query: transformedQuery, variables: variablesMore, @@ -1635,44 +1568,47 @@ describe("fetchMore on an observable query with connection", () => { result: resultMore, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); - - return observable - .fetchMore({ - variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 - updateQuery: (prev, options) => { - const state = cloneDeep(prev) as any; - state.entry.comments = [ - ...state.entry.comments, - ...options.fetchMoreResult.entry.comments, - ]; - return state; - }, - }) - .then((fetchMoreResult) => { - expect(fetchMoreResult.data.entry.comments).toHaveLength(10); - expect(fetchMoreResult.loading).toBe(false); - }); - } else if (count === 2) { - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - combinedComments.forEach((comment, i) => { - expect(comment.text).toBe(`comment ${i + 1}`); - }); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + variables: { start: 10 }, // rely on the fact that the original variables had limit: 10 + updateQuery: (prev, options) => { + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...options.fetchMoreResult.entry.comments, + ]; + return state; + }, + }); + + expect(fetchMoreResult.data.entry.comments).toHaveLength(10); + expect(fetchMoreResult.loading).toBe(false); + } + + { + const result = await stream.takeNext(); + const combinedComments = result.data.entry.comments; + + expect(combinedComments).toHaveLength(20); + combinedComments.forEach((comment, i) => { + expect(comment.text).toBe(`comment ${i + 1}`); + }); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync("field policy", (resolve, reject) => { + it("field policy", async () => { const observable = setupWithCacheConfig( - reject, { typePolicies: { Entry: { @@ -1691,38 +1627,42 @@ describe("fetchMore on an observable query with connection", () => { } ); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.entry.comments).toHaveLength(10); + const stream = new ObservableStream(observable); - return observable - .fetchMore({ - // rely on the fact that the original variables had limit: 10 - variables: { start: 10 }, - }) - .then((fetchMoreResult) => { - // this is the server result - expect(fetchMoreResult.loading).toBe(false); - expect(fetchMoreResult.data.entry.comments).toHaveLength(10); - }); - } else if (count === 2) { - const combinedComments = result.data.entry.comments; - expect(combinedComments).toHaveLength(20); - combinedComments.forEach((comment, i) => { - expect(comment.text).toBe(`comment ${i + 1}`); - }); - - setTimeout(resolve, 10); - } else { - reject(`Too many results (${JSON.stringify({ count, result })})`); - } - }); + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data.entry.comments).toHaveLength(10); + } + + { + const fetchMoreResult = await observable.fetchMore({ + // rely 
on the fact that the original variables had limit: 10 + variables: { start: 10 }, + }); + + // this is the server result + expect(fetchMoreResult.loading).toBe(false); + expect(fetchMoreResult.data.entry.comments).toHaveLength(10); + } + + { + const result = await stream.takeNext(); + + const combinedComments = result.data.entry.comments; + expect(combinedComments).toHaveLength(20); + combinedComments.forEach((comment, i) => { + expect(comment.text).toBe(`comment ${i + 1}`); + }); + } + + await expect(stream).not.toEmitAnything(); }); }); describe("will set the network status to `fetchMore`", () => { - itAsync("updateQuery", (resolve, reject) => { + it("updateQuery", async () => { const link = mockSingleLink( { request: { query: transformedQuery, variables }, @@ -1734,7 +1674,7 @@ describe("fetchMore on an observable query with connection", () => { result: resultMore, delay: 5, } - ).setOnError(reject); + ); const client = new ApolloClient({ link, @@ -1747,44 +1687,45 @@ describe("fetchMore on an observable query with connection", () => { notifyOnNetworkStatusChange: true, }); - let count = 0; - observable.subscribe({ - next: ({ data, networkStatus }) => { - switch (count++) { - case 0: - expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(10); - observable.fetchMore({ - variables: { start: 10 }, - updateQuery: (prev: any, options: any) => { - const state = cloneDeep(prev) as any; - state.entry.comments = [ - ...state.entry.comments, - ...options.fetchMoreResult.entry.comments, - ]; - return state; - }, - }); - break; - case 1: - expect(networkStatus).toBe(NetworkStatus.fetchMore); - expect((data as any).entry.comments.length).toBe(10); - break; - case 2: - expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(20); - setTimeout(resolve, 10); - break; - default: - reject(new Error("`next` called too many times")); - } + const stream = new ObservableStream(observable); + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect(data.entry.comments.length).toBe(10); + } + + void observable.fetchMore({ + variables: { start: 10 }, + updateQuery: (prev: any, options: any) => { + const state = cloneDeep(prev) as any; + state.entry.comments = [ + ...state.entry.comments, + ...options.fetchMoreResult.entry.comments, + ]; + return state; }, - error: (error: any) => reject(error), - complete: () => reject(new Error("Should not have completed")), }); + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.fetchMore); + expect(data.entry.comments.length).toBe(10); + } + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect((data as any).entry.comments.length).toBe(20); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync("field policy", (resolve, reject) => { + it("field policy", async () => { const link = mockSingleLink( { request: { query: transformedQuery, variables }, @@ -1796,7 +1737,7 @@ describe("fetchMore on an observable query with connection", () => { result: resultMore, delay: 5, } - ).setOnError(reject); + ); const client = new ApolloClient({ link, @@ -1817,33 +1758,34 @@ describe("fetchMore on an observable query with connection", () => { notifyOnNetworkStatusChange: true, }); - let count = 0; - observable.subscribe({ - next: ({ data, networkStatus }) => { - switch (count++) { - case 0: - 
expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(10); - observable.fetchMore({ - variables: { start: 10 }, - }); - break; - case 1: - expect(networkStatus).toBe(NetworkStatus.fetchMore); - expect((data as any).entry.comments.length).toBe(10); - break; - case 2: - expect(networkStatus).toBe(NetworkStatus.ready); - expect((data as any).entry.comments.length).toBe(20); - setTimeout(resolve, 10); - break; - default: - reject(new Error("`next` called too many times")); - } - }, - error: (error: any) => reject(error), - complete: () => reject(new Error("Should not have completed")), + const stream = new ObservableStream(observable); + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect((data as any).entry.comments.length).toBe(10); + } + + void observable.fetchMore({ + variables: { start: 10 }, }); + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.fetchMore); + expect(data.entry.comments.length).toBe(10); + } + + { + const { data, networkStatus } = await stream.takeNext(); + + expect(networkStatus).toBe(NetworkStatus.ready); + expect(data.entry.comments.length).toBe(20); + } + + await expect(stream).not.toEmitAnything(); }); }); }); diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts index 7f140dc17a0..0341220b32d 100644 --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -2,15 +2,15 @@ import { cloneDeep } from "lodash"; import gql from "graphql-tag"; import { GraphQLError } from "graphql"; -import { ApolloClient, FetchResult } from "../core"; +import { ApolloClient, ApolloError, FetchResult } from "../core"; import { InMemoryCache } from "../cache"; import { ApolloLink } from "../link/core"; import { Observable, ObservableSubscription as Subscription, } from "../utilities"; -import { itAsync, subscribeAndCount, mockSingleLink } from "../testing"; -import { spyOnConsole } from "../testing/internal"; +import { MockedResponse, mockSingleLink } from "../testing"; +import { ObservableStream, spyOnConsole } from "../testing/internal"; describe("mutation results", () => { const query = gql` @@ -120,10 +120,7 @@ describe("mutation results", () => { }, }; - function setupObsQuery( - reject: (reason: any) => any, - ...mockedResponses: any[] - ) { + function setupObsQuery(...mockedResponses: MockedResponse[]) { const client = new ApolloClient({ link: mockSingleLink( { @@ -153,11 +150,7 @@ describe("mutation results", () => { }; } - function setupDelayObsQuery( - reject: (reason: any) => any, - delay: number, - ...mockedResponses: any[] - ) { + function setupDelayObsQuery(delay: number, ...mockedResponses: any[]) { const client = new ApolloClient({ link: mockSingleLink( { @@ -166,7 +159,9 @@ describe("mutation results", () => { delay, }, ...mockedResponses - ).setOnError(reject), + ).setOnError((error) => { + throw error; + }), cache: new InMemoryCache({ dataIdFromObject: (obj: any) => { if (obj.id && obj.__typename) { @@ -188,249 +183,216 @@ describe("mutation results", () => { }; } - itAsync("correctly primes cache for tests", (resolve, reject) => { - const { client, obsQuery } = setupObsQuery(reject); - return obsQuery - .result() - .then(() => client.query({ query })) - .then(resolve, reject); + it("correctly primes cache for tests", async () => { + const { client, obsQuery } = setupObsQuery(); + + await obsQuery.result().then(() => client.query({ query })); }); - 
itAsync( - "correctly integrates field changes by default", - (resolve, reject) => { - const mutation = gql` - mutation setCompleted { - setCompleted(todoId: "3") { - id - completed - __typename - } + it("correctly integrates field changes by default", async () => { + const mutation = gql` + mutation setCompleted { + setCompleted(todoId: "3") { + id + completed __typename } - `; + __typename + } + `; - const mutationResult = { - data: { - __typename: "Mutation", - setCompleted: { - __typename: "Todo", - id: "3", - completed: true, - }, + const mutationResult = { + data: { + __typename: "Mutation", + setCompleted: { + __typename: "Todo", + id: "3", + completed: true, }, - }; + }, + }; - const { client, obsQuery } = setupObsQuery(reject, { - request: { query: mutation }, - result: mutationResult, - }); + const { client, obsQuery } = setupObsQuery({ + request: { query: mutation }, + result: mutationResult, + }); - return obsQuery - .result() - .then(() => { - return client.mutate({ mutation }); - }) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - expect(newResult.data.todoList.todos[0].completed).toBe(true); - }) - .then(resolve, reject); - } - ); - - itAsync( - "correctly integrates field changes by default with variables", - (resolve, reject) => { - const query = gql` - query getMini($id: ID!) { - mini(id: $id) { - id - cover(maxWidth: 600, maxHeight: 400) - __typename - } + await obsQuery.result(); + await client.mutate({ mutation }); + const newResult = await client.query({ query }); + expect(newResult.data.todoList.todos[0].completed).toBe(true); + }); + + it("correctly integrates field changes by default with variables", async () => { + const query = gql` + query getMini($id: ID!) { + mini(id: $id) { + id + cover(maxWidth: 600, maxHeight: 400) + __typename } - `; - const mutation = gql` - mutation upload($signature: String!) { - mini: submitMiniCoverS3DirectUpload(signature: $signature) { - id - cover(maxWidth: 600, maxHeight: 400) - __typename - } + } + `; + const mutation = gql` + mutation upload($signature: String!) 
{ + mini: submitMiniCoverS3DirectUpload(signature: $signature) { + id + cover(maxWidth: 600, maxHeight: 400) + __typename } - `; + } + `; - const link = mockSingleLink( - { - request: { - query, - variables: { id: 1 }, - } as any, - delay: 100, - result: { - data: { mini: { id: 1, cover: "image", __typename: "Mini" } }, - }, + const link = mockSingleLink( + { + request: { + query, + variables: { id: 1 }, + } as any, + delay: 100, + result: { + data: { mini: { id: 1, cover: "image", __typename: "Mini" } }, + }, + }, + { + request: { + query: mutation, + variables: { signature: "1234" }, + } as any, + delay: 150, + result: { + data: { mini: { id: 1, cover: "image2", __typename: "Mini" } }, }, - { - request: { - query: mutation, - variables: { signature: "1234" }, - } as any, - delay: 150, - result: { - data: { mini: { id: 1, cover: "image2", __typename: "Mini" } }, - }, - } - ).setOnError(reject); - - interface Data { - mini: { id: number; cover: string; __typename: string }; } - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ - dataIdFromObject: (obj: any) => { - if (obj.id && obj.__typename) { - return obj.__typename + obj.id; - } - return null; - }, - }), - }); - - const obs = client.watchQuery({ - query, - variables: { id: 1 }, - notifyOnNetworkStatusChange: false, - }); + ); - let count = 0; - obs.subscribe({ - next: (result) => { - if (count === 0) { - client.mutate({ mutation, variables: { signature: "1234" } }); - expect(result.data!.mini.cover).toBe("image"); - - setTimeout(() => { - if (count === 0) - reject( - new Error("mutate did not re-call observable with next value") - ); - }, 250); - } - if (count === 1) { - expect(result.data!.mini.cover).toBe("image2"); - resolve(); + interface Data { + mini: { id: number; cover: string; __typename: string }; + } + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + dataIdFromObject: (obj: any) => { + if (obj.id && obj.__typename) { + return obj.__typename + obj.id; } - count++; + return null; }, - error: reject, - }); + }), + }); + + const obs = client.watchQuery({ + query, + variables: { id: 1 }, + notifyOnNetworkStatusChange: false, + }); + + const stream = new ObservableStream(obs); + { + const result = await stream.takeNext(); + expect(result.data!.mini.cover).toBe("image"); + } + await client.mutate({ mutation, variables: { signature: "1234" } }); + { + const result = await stream.takeNext(); + expect(result.data!.mini.cover).toBe("image2"); } - ); + }); - itAsync( - "should write results to cache according to errorPolicy", - async (resolve, reject) => { - const expectedFakeError = new GraphQLError("expected/fake error"); + it("should write results to cache according to errorPolicy", async () => { + const expectedFakeError = new GraphQLError("expected/fake error"); - const client = new ApolloClient({ - cache: new InMemoryCache({ - typePolicies: { - Person: { - keyFields: ["name"], - }, + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Person: { + keyFields: ["name"], }, - }), + }, + }), - link: new ApolloLink( - (operation) => - new Observable((observer) => { - observer.next({ - errors: [expectedFakeError], - data: { - newPerson: { - __typename: "Person", - name: operation.variables.newName, - }, + link: new ApolloLink( + (operation) => + new Observable((observer) => { + observer.next({ + errors: [expectedFakeError], + data: { + newPerson: { + __typename: "Person", + name: operation.variables.newName, }, - }); - observer.complete(); - }) - 
).setOnError(reject), - }); + }, + }); + observer.complete(); + }) + ), + }); - const mutation = gql` - mutation AddNewPerson($newName: String!) { - newPerson(name: $newName) { - name - } + const mutation = gql` + mutation AddNewPerson($newName: String!) { + newPerson(name: $newName) { + name } - `; - - await client - .mutate({ - mutation, - variables: { - newName: "Hugh Willson", - }, - }) - .then( - () => { - reject("should have thrown for default errorPolicy"); - }, - (error) => { - expect(error.message).toBe(expectedFakeError.message); - } - ); - - expect(client.cache.extract()).toMatchSnapshot(); + } + `; - const ignoreErrorsResult = await client.mutate({ + await client + .mutate({ mutation, - errorPolicy: "ignore", variables: { - newName: "Jenn Creighton", + newName: "Hugh Willson", }, - }); - - expect(ignoreErrorsResult).toEqual({ - data: { - newPerson: { - __typename: "Person", - name: "Jenn Creighton", - }, + }) + .then( + () => { + throw new Error("should have thrown for default errorPolicy"); }, - }); + (error) => { + expect(error.message).toBe(expectedFakeError.message); + } + ); - expect(client.cache.extract()).toMatchSnapshot(); + expect(client.cache.extract()).toMatchSnapshot(); - const allErrorsResult = await client.mutate({ - mutation, - errorPolicy: "all", - variables: { - newName: "Ellen Shapiro", - }, - }); + const ignoreErrorsResult = await client.mutate({ + mutation, + errorPolicy: "ignore", + variables: { + newName: "Jenn Creighton", + }, + }); - expect(allErrorsResult).toEqual({ - data: { - newPerson: { - __typename: "Person", - name: "Ellen Shapiro", - }, + expect(ignoreErrorsResult).toEqual({ + data: { + newPerson: { + __typename: "Person", + name: "Jenn Creighton", }, - errors: [expectedFakeError], - }); + }, + }); - expect(client.cache.extract()).toMatchSnapshot(); + expect(client.cache.extract()).toMatchSnapshot(); - resolve(); - } - ); + const allErrorsResult = await client.mutate({ + mutation, + errorPolicy: "all", + variables: { + newName: "Ellen Shapiro", + }, + }); + + expect(allErrorsResult).toEqual({ + data: { + newPerson: { + __typename: "Person", + name: "Ellen Shapiro", + }, + }, + errors: [expectedFakeError], + }); + + expect(client.cache.extract()).toMatchSnapshot(); + }); it("should warn when the result fields don't match the query fields", async () => { using _consoleSpies = spyOnConsole.takeSnapshots("error"); @@ -484,7 +446,6 @@ describe("mutation results", () => { }; const { client, obsQuery } = setupObsQuery( - reject, { request: { query: queryTodos }, result: queryTodosResult, @@ -807,14 +768,14 @@ describe("mutation results", () => { }, }; - itAsync("analogous of ARRAY_INSERT", (resolve, reject) => { + it("analogous of ARRAY_INSERT", async () => { let subscriptionHandle: Subscription; - const { client, obsQuery } = setupObsQuery(reject, { + const { client, obsQuery } = setupObsQuery({ request: { query: mutation }, result: mutationResult, }); - return obsQuery + await obsQuery .result() .then(() => { // we have to actually subscribe to the query to be able to update it @@ -855,357 +816,308 @@ describe("mutation results", () => { expect(newResult.data.todoList.todos[0].text).toBe( "This one was created with a mutation." 
); - }) - .then(resolve, reject); + }); }); - itAsync( - "does not fail if optional query variables are not supplied", - (resolve, reject) => { - let subscriptionHandle: Subscription; - const mutationWithVars = gql` - mutation createTodo($requiredVar: String!, $optionalVar: String) { - createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { - id - text - completed - __typename - } + it("does not fail if optional query variables are not supplied", async () => { + const mutationWithVars = gql` + mutation createTodo($requiredVar: String!, $optionalVar: String) { + createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { + id + text + completed __typename } - `; - - // the test will pass if optionalVar is uncommented - const variables = { - requiredVar: "x", - // optionalVar: 'y', - }; - const { client, obsQuery } = setupObsQuery(reject, { - request: { - query: mutationWithVars, - variables, - }, - result: mutationResult, - }); + __typename + } + `; - return obsQuery - .result() - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve) => { - const handle = client.watchQuery({ - query, - variables, - }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => - client.mutate({ - mutation: mutationWithVars, - variables, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." - ); - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; - }, - }, - }) - ) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + // the test will pass if optionalVar is uncommented + const variables = { + requiredVar: "x", + // optionalVar: 'y', + }; + const { client, obsQuery } = setupObsQuery({ + request: { + query: mutationWithVars, + variables, + }, + result: mutationResult, + }); + + await obsQuery.result(); - // There should be one more todo item than before - expect(newResult.data.todoList.todos.length).toBe(4); + // we have to actually subscribe to the query to be able to update it - // Since we used `prepend` it should be at the front - expect(newResult.data.todoList.todos[0].text).toBe( + const handle = client.watchQuery({ + query, + variables, + }); + const stream = new ObservableStream(handle); + await stream.takeNext(); + + await client.mutate({ + mutation: mutationWithVars, + variables, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( "This one was created with a mutation." ); - }) - .then(resolve, reject); - } - ); + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, + }, + }); + const newResult = await client.query({ query }); - itAsync( - "does not fail if the query did not complete correctly", - (resolve, reject) => { - const { client, obsQuery } = setupObsQuery(reject, { - request: { query: mutation }, - result: mutationResult, - }); - const subs = obsQuery.subscribe({ - next: () => null, - }); - // Cancel the query right away! 
- subs.unsubscribe(); - return client - .mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." - ); + // There should be one more todo item than before + expect(newResult.data.todoList.todos.length).toBe(4); - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; - }, - }, - }) - .then(resolve, reject); - } - ); + // Since we used `prepend` it should be at the front + expect(newResult.data.todoList.todos[0].text).toBe( + "This one was created with a mutation." + ); + }); - itAsync( - "does not fail if the query did not finish loading", - (resolve, reject) => { - const { client, obsQuery } = setupDelayObsQuery(reject, 15, { - request: { query: mutation }, - result: mutationResult, - }); - obsQuery.subscribe({ - next: () => null, - }); - return client - .mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." - ); + it("does not fail if the query did not complete correctly", async () => { + const { client, obsQuery } = setupObsQuery({ + request: { query: mutation }, + result: mutationResult, + }); + const subs = obsQuery.subscribe({ + next: () => null, + }); + // Cancel the query right away! + subs.unsubscribe(); - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; - }, - }, - }) - .then(resolve, reject); - } - ); + await client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( + "This one was created with a mutation." + ); - itAsync( - "does not make next queries fail if a mutation fails", - (resolve, reject) => { - const { client, obsQuery } = setupObsQuery( - (error) => { - throw error; - }, - { - request: { query: mutation }, - result: { errors: [new Error("mock error")] }, + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; }, - { - request: { query: queryWithTypename }, - result, - } - ); + }, + }); + }); - obsQuery.subscribe({ - next() { - client - .mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - const state = cloneDeep(prev) as any; - // It's unfortunate that this function is called at all, but we are removing - // the updateQueries API soon so it won't matter. 
- state.todoList.todos.unshift( - mResult.data && mResult.data.createTodo - ); - return state; - }, - }, - }) - .then( - () => reject(new Error("Mutation should have failed")), - () => - client.mutate({ - mutation, - updateQueries: { - todoList: (prev, options) => { - const mResult = options.mutationResult as any; - const state = cloneDeep(prev) as any; - state.todoList.todos.unshift(mResult.data.createTodo); - return state; - }, - }, - }) - ) - .then( - () => reject(new Error("Mutation should have failed")), - () => obsQuery.refetch() - ) - .then(resolve, reject); + it("does not fail if the query did not finish loading", async () => { + const { client, obsQuery } = setupDelayObsQuery(15, { + request: { query: mutation }, + result: mutationResult, + }); + obsQuery.subscribe({ + next: () => null, + }); + await client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( + "This one was created with a mutation." + ); + + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; }, - }); - } - ); + }, + }); + }); - itAsync("error handling in reducer functions", (resolve, reject) => { - let subscriptionHandle: Subscription; - const { client, obsQuery } = setupObsQuery(reject, { + it("does not make next queries fail if a mutation fails", async () => { + const { client, obsQuery } = setupObsQuery( + { + request: { query: mutation }, + result: { errors: [new Error("mock error")] }, + }, + { + request: { query: queryWithTypename }, + result, + } + ); + const stream = new ObservableStream(obsQuery); + await stream.takeNext(); + + await expect(() => + client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + const state = cloneDeep(prev) as any; + // It's unfortunate that this function is called at all, but we are removing + // the updateQueries API soon so it won't matter. + state.todoList.todos.unshift( + mResult.data && mResult.data.createTodo + ); + return state; + }, + }, + }) + ).rejects.toThrow(); + + await expect(() => + client.mutate({ + mutation, + updateQueries: { + todoList: (prev, options) => { + const mResult = options.mutationResult as any; + const state = cloneDeep(prev) as any; + state.todoList.todos.unshift(mResult.data.createTodo); + return state; + }, + }, + }) + ).rejects.toThrow(); + await obsQuery.refetch(); + }); + + it("error handling in reducer functions", async () => { + const { client, obsQuery } = setupObsQuery({ request: { query: mutation }, result: mutationResult, }); - return obsQuery - .result() - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => - client.mutate({ - mutation, - updateQueries: { - todoList: () => { - throw new Error(`Hello... It's me.`); - }, + await obsQuery.result(); + + // we have to actually subscribe to the query to be able to update it + + const handle = client.watchQuery({ query }); + const stream = new ObservableStream(handle); + await stream.takeNext(); + + await expect(() => + client.mutate({ + mutation, + updateQueries: { + todoList: () => { + throw new Error(`Hello... 
It's me.`); }, - }) - ) - .then( - () => { - subscriptionHandle.unsubscribe(); - reject("should have thrown"); }, - (error) => { - subscriptionHandle.unsubscribe(); - expect(error.message).toBe(`Hello... It's me.`); - } - ) - .then(resolve, reject); + }) + ).rejects.toThrow( + new ApolloError({ networkError: Error(`Hello... It's me.`) }) + ); }); }); - itAsync( - "does not fail if one of the previous queries did not complete correctly", - (resolve, reject) => { - const variableQuery = gql` - query Echo($message: String) { - echo(message: $message) - } - `; + it("does not fail if one of the previous queries did not complete correctly", async () => { + const variableQuery = gql` + query Echo($message: String) { + echo(message: $message) + } + `; - const variables1 = { - message: "a", - }; + const variables1 = { + message: "a", + }; - const result1 = { - data: { - echo: "a", - }, - }; + const result1 = { + data: { + echo: "a", + }, + }; - const variables2 = { - message: "b", - }; + const variables2 = { + message: "b", + }; - const result2 = { - data: { - echo: "b", - }, - }; + const result2 = { + data: { + echo: "b", + }, + }; - const resetMutation = gql` - mutation Reset { - reset { - echo - } + const resetMutation = gql` + mutation Reset { + reset { + echo } - `; + } + `; - const resetMutationResult = { - data: { - reset: { - echo: "0", - }, + const resetMutationResult = { + data: { + reset: { + echo: "0", }, - }; + }, + }; - const client = new ApolloClient({ - link: mockSingleLink( - { - request: { query: variableQuery, variables: variables1 } as any, - result: result1, - }, - { - request: { query: variableQuery, variables: variables2 } as any, - result: result2, - }, - { - request: { query: resetMutation } as any, - result: resetMutationResult, - } - ).setOnError(reject), - cache: new InMemoryCache({ addTypename: false }), - }); + const client = new ApolloClient({ + link: mockSingleLink( + { + request: { query: variableQuery, variables: variables1 } as any, + result: result1, + }, + { + request: { query: variableQuery, variables: variables2 } as any, + result: result2, + }, + { + request: { query: resetMutation } as any, + result: resetMutationResult, + } + ), + cache: new InMemoryCache({ addTypename: false }), + }); - const watchedQuery = client.watchQuery({ - query: variableQuery, - variables: variables1, - }); + const watchedQuery = client.watchQuery({ + query: variableQuery, + variables: variables1, + }); - const firstSubs = watchedQuery.subscribe({ - next: () => null, - error: reject, - }); + const firstSubs = watchedQuery.subscribe({ + next: () => null, + error: (error) => { + throw error; + }, + }); - // Cancel the query right away! - firstSubs.unsubscribe(); + // Cancel the query right away! 
+ firstSubs.unsubscribe(); - subscribeAndCount(reject, watchedQuery, (count, result) => { - if (count === 1) { - expect(result.data).toEqual({ echo: "b" }); - client.mutate({ - mutation: resetMutation, - updateQueries: { - Echo: () => { - return { echo: "0" }; - }, - }, - }); - } else if (count === 2) { - expect(result.data).toEqual({ echo: "0" }); - resolve(); - } - }); + const stream = new ObservableStream(watchedQuery); + + await watchedQuery.refetch(variables2); - watchedQuery.refetch(variables2); + { + const result = await stream.takeNext(); + + expect(result.data).toEqual({ echo: "b" }); + } + + await client.mutate({ + mutation: resetMutation, + updateQueries: { + Echo: () => { + return { echo: "0" }; + }, + }, + }); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual({ echo: "0" }); } - ); + }); - itAsync("allows mutations with optional arguments", (resolve, reject) => { + it("allows mutations with optional arguments", async () => { let count = 0; const client = new ApolloClient({ @@ -1252,7 +1164,7 @@ describe("mutation results", () => { } `; - Promise.all([ + const results = await Promise.all([ client.mutate({ mutation, variables: { a: 1, b: 2 }, @@ -1268,24 +1180,21 @@ describe("mutation results", () => { client.mutate({ mutation, }), - ]) - .then((results) => { - expect(client.cache.extract()).toEqual({ - ROOT_MUTATION: { - __typename: "Mutation", - }, - }); - expect(results).toEqual([ - { data: { result: "hello" } }, - { data: { result: "world" } }, - { data: { result: "goodbye" } }, - { data: { result: "moon" } }, - ]); - }) - .then(resolve, reject); + ]); + expect(client.cache.extract()).toEqual({ + ROOT_MUTATION: { + __typename: "Mutation", + }, + }); + expect(results).toEqual([ + { data: { result: "hello" } }, + { data: { result: "world" } }, + { data: { result: "goodbye" } }, + { data: { result: "moon" } }, + ]); }); - itAsync("allows mutations with default values", (resolve, reject) => { + it("allows mutations with default values", async () => { let count = 0; const client = new ApolloClient({ @@ -1334,7 +1243,7 @@ describe("mutation results", () => { } `; - Promise.all([ + const results = await Promise.all([ client.mutate({ mutation, variables: { a: 1, b: "water" }, @@ -1347,103 +1256,94 @@ describe("mutation results", () => { mutation, variables: { c: 3 }, }), - ]) - .then((results) => { - expect(client.cache.extract()).toEqual({ - ROOT_MUTATION: { - __typename: "Mutation", - }, - }); - expect(results).toEqual([ - { data: { result: "hello" } }, - { data: { result: "world" } }, - { data: { result: "goodbye" } }, - ]); - }) - .then(resolve, reject); + ]); + expect(client.cache.extract()).toEqual({ + ROOT_MUTATION: { + __typename: "Mutation", + }, + }); + expect(results).toEqual([ + { data: { result: "hello" } }, + { data: { result: "world" } }, + { data: { result: "goodbye" } }, + ]); }); - itAsync( - "will pass null to the network interface when provided", - (resolve, reject) => { - let count = 0; + it("will pass null to the network interface when provided", async () => { + let count = 0; - const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), - link: ApolloLink.from([ - ({ variables }: any) => - new Observable((observer) => { - switch (count++) { - case 0: - expect(variables).toEqual({ - a: 1, - b: 2, - c: null, - }); - observer.next({ data: { result: "hello" } }); - observer.complete(); - return; - case 1: - expect(variables).toEqual({ - a: 1, - b: null, - c: 3, - }); - observer.next({ data: { result: "world" 
} }); - observer.complete(); - return; - case 2: - expect(variables).toEqual({ - a: null, - b: null, - c: null, - }); - observer.next({ data: { result: "moon" } }); - observer.complete(); - return; - default: - observer.error(new Error("Too many network calls.")); - return; - } - }), - ] as any), - }); + const client = new ApolloClient({ + cache: new InMemoryCache({ addTypename: false }), + link: ApolloLink.from([ + ({ variables }: any) => + new Observable((observer) => { + switch (count++) { + case 0: + expect(variables).toEqual({ + a: 1, + b: 2, + c: null, + }); + observer.next({ data: { result: "hello" } }); + observer.complete(); + return; + case 1: + expect(variables).toEqual({ + a: 1, + b: null, + c: 3, + }); + observer.next({ data: { result: "world" } }); + observer.complete(); + return; + case 2: + expect(variables).toEqual({ + a: null, + b: null, + c: null, + }); + observer.next({ data: { result: "moon" } }); + observer.complete(); + return; + default: + observer.error(new Error("Too many network calls.")); + return; + } + }), + ] as any), + }); - const mutation = gql` - mutation ($a: Int!, $b: Int, $c: Int) { - result(a: $a, b: $b, c: $c) - } - `; + const mutation = gql` + mutation ($a: Int!, $b: Int, $c: Int) { + result(a: $a, b: $b, c: $c) + } + `; - Promise.all([ - client.mutate({ - mutation, - variables: { a: 1, b: 2, c: null }, - }), - client.mutate({ - mutation, - variables: { a: 1, b: null, c: 3 }, - }), - client.mutate({ - mutation, - variables: { a: null, b: null, c: null }, - }), - ]) - .then((results) => { - expect(client.cache.extract()).toEqual({ - ROOT_MUTATION: { - __typename: "Mutation", - }, - }); - expect(results).toEqual([ - { data: { result: "hello" } }, - { data: { result: "world" } }, - { data: { result: "moon" } }, - ]); - }) - .then(resolve, reject); - } - ); + const results = await Promise.all([ + client.mutate({ + mutation, + variables: { a: 1, b: 2, c: null }, + }), + client.mutate({ + mutation, + variables: { a: 1, b: null, c: 3 }, + }), + client.mutate({ + mutation, + variables: { a: null, b: null, c: null }, + }), + ]); + expect(client.cache.extract()).toEqual({ + ROOT_MUTATION: { + __typename: "Mutation", + }, + }); + expect(results).toEqual([ + { data: { result: "hello" } }, + { data: { result: "world" } }, + { data: { result: "moon" } }, + ]); + }); describe("store transaction updater", () => { const mutation = gql` @@ -1471,388 +1371,318 @@ describe("mutation results", () => { }, }; - itAsync("analogous of ARRAY_INSERT", (resolve, reject) => { - let subscriptionHandle: Subscription; - const { client, obsQuery } = setupObsQuery(reject, { + it("analogous of ARRAY_INSERT", async () => { + const { client, obsQuery } = setupObsQuery({ request: { query: mutation }, result: mutationResult, }); - return obsQuery - .result() - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => - client.mutate({ - mutation, - update: (proxy, mResult: any) => { - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." 
- ); + await obsQuery.result(); - const id = "TodoList5"; - const fragment = gql` - fragment todoList on TodoList { - todos { - id - text - completed - __typename - } - } - `; + // we have to actually subscribe to the query to be able to update it - const data: any = proxy.readFragment({ id, fragment }); + const handle = client.watchQuery({ query }); + const stream = new ObservableStream(handle); + await stream.takeNext(); + await client.mutate({ + mutation, + update: (proxy, mResult: any) => { + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( + "This one was created with a mutation." + ); - proxy.writeFragment({ - data: { - ...data, - todos: [mResult.data.createTodo, ...data.todos], - }, - id, - fragment, - }); + const id = "TodoList5"; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], }, - }) - ) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + id, + fragment, + }); + }, + }); - // There should be one more todo item than before - expect(newResult.data.todoList.todos.length).toBe(4); + const newResult = await client.query({ query }); - // Since we used `prepend` it should be at the front - expect(newResult.data.todoList.todos[0].text).toBe( - "This one was created with a mutation." - ); - }) - .then(resolve, reject); + // There should be one more todo item than before + expect(newResult.data.todoList.todos.length).toBe(4); + + // Since we used `prepend` it should be at the front + expect(newResult.data.todoList.todos[0].text).toBe( + "This one was created with a mutation." + ); }); - itAsync( - "does not fail if optional query variables are not supplied", - (resolve, reject) => { - let subscriptionHandle: Subscription; - const mutationWithVars = gql` - mutation createTodo($requiredVar: String!, $optionalVar: String) { - createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { - id - text - completed - __typename - } + it("does not fail if optional query variables are not supplied", async () => { + const mutationWithVars = gql` + mutation createTodo($requiredVar: String!, $optionalVar: String) { + createTodo(requiredVar: $requiredVar, optionalVar: $optionalVar) { + id + text + completed __typename } - `; - - // the test will pass if optionalVar is uncommented - const variables = { - requiredVar: "x", - // optionalVar: 'y', - }; - - const { client, obsQuery } = setupObsQuery(reject, { - request: { - query: mutationWithVars, - variables, - }, - result: mutationResult, - }); + __typename + } + `; - return obsQuery - .result() - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve) => { - const handle = client.watchQuery({ - query, - variables, - }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => - client.mutate({ - mutation: mutationWithVars, - variables, - update: (proxy, mResult: any) => { - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." 
- ); + // the test will pass if optionalVar is uncommented + const variables = { + requiredVar: "x", + // optionalVar: 'y', + }; - const id = "TodoList5"; - const fragment = gql` - fragment todoList on TodoList { - todos { - id - text - completed - __typename - } - } - `; - - const data: any = proxy.readFragment({ id, fragment }); - - proxy.writeFragment({ - data: { - ...data, - todos: [mResult.data.createTodo, ...data.todos], - }, - id, - fragment, - }); - }, - }) - ) - .then(() => { - return client.query({ query }); - }) - .then((newResult: any) => { - subscriptionHandle.unsubscribe(); + const { client, obsQuery } = setupObsQuery({ + request: { + query: mutationWithVars, + variables, + }, + result: mutationResult, + }); + + await obsQuery.result(); + + // we have to actually subscribe to the query to be able to update it + + const handle = client.watchQuery({ + query, + variables, + }); + const stream = new ObservableStream(handle); + await stream.takeNext(); + + await client.mutate({ + mutation: mutationWithVars, + variables, + update: (proxy, mResult: any) => { + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( + "This one was created with a mutation." + ); + + const id = "TodoList5"; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); + }, + }); + const newResult = await client.query({ query }); + + // There should be one more todo item than before + expect(newResult.data.todoList.todos.length).toBe(4); + + // Since we used `prepend` it should be at the front + expect(newResult.data.todoList.todos[0].text).toBe( + "This one was created with a mutation." + ); + }); + + it("does not make next queries fail if a mutation fails", async () => { + const { client, obsQuery } = setupObsQuery( + { + request: { query: mutation }, + result: { errors: [new Error("mock error")] }, + }, + { + request: { query: queryWithTypename }, + result, + } + ); - // There should be one more todo item than before - expect(newResult.data.todoList.todos.length).toBe(4); + const stream = new ObservableStream(obsQuery); + await stream.takeNext(); - // Since we used `prepend` it should be at the front - expect(newResult.data.todoList.todos[0].text).toBe( + await expect( + client.mutate({ + mutation, + update: (proxy, mResult: any) => { + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( "This one was created with a mutation." 
); - }) - .then(resolve, reject); - } - ); - itAsync( - "does not make next queries fail if a mutation fails", - (resolve, reject) => { - const { client, obsQuery } = setupObsQuery( - (error) => { - throw error; - }, - { - request: { query: mutation }, - result: { errors: [new Error("mock error")] }, + const id = "TodoList5"; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); }, - { - request: { query: queryWithTypename }, - result, - } - ); + }) + ).rejects.toThrow(); + await expect( + client.mutate({ + mutation, + update: (proxy, mResult: any) => { + expect(mResult.data.createTodo.id).toBe("99"); + expect(mResult.data.createTodo.text).toBe( + "This one was created with a mutation." + ); - obsQuery.subscribe({ - next() { - client - .mutate({ - mutation, - update: (proxy, mResult: any) => { - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." - ); - - const id = "TodoList5"; - const fragment = gql` - fragment todoList on TodoList { - todos { - id - text - completed - __typename - } - } - `; - - const data: any = proxy.readFragment({ id, fragment }); - - proxy.writeFragment({ - data: { - ...data, - todos: [mResult.data.createTodo, ...data.todos], - }, - id, - fragment, - }); - }, - }) - .then( - () => reject(new Error("Mutation should have failed")), - () => - client.mutate({ - mutation, - update: (proxy, mResult: any) => { - expect(mResult.data.createTodo.id).toBe("99"); - expect(mResult.data.createTodo.text).toBe( - "This one was created with a mutation." - ); - - const id = "TodoList5"; - const fragment = gql` - fragment todoList on TodoList { - todos { - id - text - completed - __typename - } - } - `; - - const data: any = proxy.readFragment({ id, fragment }); - - proxy.writeFragment({ - data: { - ...data, - todos: [mResult.data.createTodo, ...data.todos], - }, - id, - fragment, - }); - }, - }) - ) - .then( - () => reject(new Error("Mutation should have failed")), - () => obsQuery.refetch() - ) - .then(resolve, reject); + const id = "TodoList5"; + const fragment = gql` + fragment todoList on TodoList { + todos { + id + text + completed + __typename + } + } + `; + + const data: any = proxy.readFragment({ id, fragment }); + + proxy.writeFragment({ + data: { + ...data, + todos: [mResult.data.createTodo, ...data.todos], + }, + id, + fragment, + }); }, - }); - } - ); + }) + ).rejects.toThrow(); + await obsQuery.refetch(); + }); - itAsync("error handling in reducer functions", (resolve, reject) => { - let subscriptionHandle: Subscription; - const { client, obsQuery } = setupObsQuery(reject, { + it("error handling in reducer functions", async () => { + const { client, obsQuery } = setupObsQuery({ request: { query: mutation }, result: mutationResult, }); - return obsQuery - .result() - .then(() => { - // we have to actually subscribe to the query to be able to update it - return new Promise((resolve) => { - const handle = client.watchQuery({ query }); - subscriptionHandle = handle.subscribe({ - next(res) { - resolve(res); - }, - }); - }); - }) - .then(() => - client.mutate({ - mutation, - update: () => { - throw new Error(`Hello... 
It's me.`); - }, - }) - ) - .then( - () => { - subscriptionHandle.unsubscribe(); - reject("should have thrown"); + await obsQuery.result(); + // we have to actually subscribe to the query to be able to update it + + const handle = client.watchQuery({ query }); + const stream = new ObservableStream(handle); + await stream.takeNext(); + + await expect( + client.mutate({ + mutation, + update: () => { + throw new Error(`Hello... It's me.`); }, - (error) => { - subscriptionHandle.unsubscribe(); - expect(error.message).toBe(`Hello... It's me.`); - } - ) - .then(resolve, reject); + }) + ).rejects.toThrow( + new ApolloError({ networkError: Error(`Hello... It's me.`) }) + ); }); - itAsync( - "mutate() data should never be `undefined` in case of success", - (resolve, reject) => { - const mutation = gql` - mutation Foo { - foo { - bar - } + it("mutate() data should never be `undefined` in case of success", async () => { + const mutation = gql` + mutation Foo { + foo { + bar } - `; + } + `; - const result1 = { - data: { - foo: { - bar: "a", - }, + const result1 = { + data: { + foo: { + bar: "a", }, - }; - - const client = new ApolloClient({ - link: mockSingleLink({ - request: { query: mutation } as any, - result: result1, - }).setOnError(reject), - cache: new InMemoryCache({ addTypename: false }), - }); + }, + }; - client - .mutate<{ foo: { bar: string } }>({ - mutation: mutation, - }) - .then((result) => { - // This next line should **not** raise "TS2533: Object is possibly 'null' or 'undefined'.", even without `!` operator - if (result.data!.foo.bar) { - resolve(); - } - }, reject); - } - ); + const client = new ApolloClient({ + link: mockSingleLink({ + request: { query: mutation } as any, + result: result1, + }), + cache: new InMemoryCache({ addTypename: false }), + }); - itAsync( - "data might be undefined in case of failure with errorPolicy = ignore", - async (resolve, reject) => { - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: new ApolloLink( - () => - new Observable<FetchResult>((observer) => { - observer.next({ - errors: [new GraphQLError("Oops")], - }); - observer.complete(); - }) - ).setOnError(reject), - }); + const result = await client.mutate<{ foo: { bar: string } }>({ + mutation: mutation, + }); + // This next line should **not** raise "TS2533: Object is possibly 'null' or 'undefined'.", even without `!` operator + if (!result.data?.foo.bar) { + throw new Error("data was unexpectedly undefined"); + } + }); - const ignoreErrorsResult = await client.mutate({ - mutation: gql` - mutation Foo { - foo - } - `, - fetchPolicy: "no-cache", - errorPolicy: "ignore", - }); + it("data might be undefined in case of failure with errorPolicy = ignore", async () => { + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink( + () => + new Observable<FetchResult>((observer) => { + observer.next({ + errors: [new GraphQLError("Oops")], + }); + observer.complete(); + }) + ), + }); - expect(ignoreErrorsResult).toEqual({ - data: undefined, - errors: undefined, - }); + const ignoreErrorsResult = await client.mutate({ + mutation: gql` + mutation Foo { + foo + } + `, + fetchPolicy: "no-cache", + errorPolicy: "ignore", + }); - resolve(); - } - ); + expect(ignoreErrorsResult).toEqual({ + data: undefined, + errors: undefined, + }); + }); }); }); diff --git a/src/cache/inmemory/__tests__/fragmentRegistry.ts b/src/cache/inmemory/__tests__/fragmentRegistry.ts index 8576bc4cee4..b04b57255f7 100644 --- a/src/cache/inmemory/__tests__/fragmentRegistry.ts +++
b/src/cache/inmemory/__tests__/fragmentRegistry.ts @@ -1,7 +1,7 @@ import { ApolloClient, ApolloLink, gql, NetworkStatus } from "../../../core"; import { getFragmentDefinitions, Observable } from "../../../utilities"; import { InMemoryCache, createFragmentRegistry } from "../../index"; -import { itAsync, subscribeAndCount } from "../../../testing"; +import { ObservableStream } from "../../../testing/internal"; describe("FragmentRegistry", () => { it("can be passed to InMemoryCache", () => { @@ -38,7 +38,7 @@ describe("FragmentRegistry", () => { }); }); - itAsync("influences ApolloClient and ApolloLink", (resolve, reject) => { + it("influences ApolloClient and ApolloLink", async () => { const cache = new InMemoryCache({ fragments: createFragmentRegistry(gql` fragment SourceFragment on Query { @@ -86,42 +86,31 @@ describe("FragmentRegistry", () => { }, }); - subscribeAndCount( - reject, - client.watchQuery({ - query, - fetchPolicy: "cache-and-network", - }), - (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: { - __typename: "Query", - source: "local", - }, - }); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - __typename: "Query", - source: "link", - }, - }); - - expect(cache.readQuery({ query })).toEqual({ - source: "link", - }); - - setTimeout(resolve, 10); - } else { - reject(`Unexpectedly many results (${count})`); - } - } + const stream = new ObservableStream( + client.watchQuery({ query, fetchPolicy: "cache-and-network" }) ); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: { + __typename: "Query", + source: "local", + }, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + __typename: "Query", + source: "link", + }, + }); + + expect(cache.readQuery({ query })).toEqual({ + source: "link", + }); }); it("throws an error when not all used fragments are defined", () => { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 736f2a0874f..af4aefaf8e1 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -13,8 +13,8 @@ import { import { MissingFieldError } from "../.."; import { relayStylePagination, stringifyForDisplay } from "../../../utilities"; import { FieldPolicy, StorageType } from "../policies"; -import { itAsync, subscribeAndCount, MockLink } from "../../../testing/core"; -import { spyOnConsole } from "../../../testing/internal"; +import { MockLink } from "../../../testing/core"; +import { ObservableStream, spyOnConsole } from "../../../testing/internal"; function reverse(s: string) { return s.split("").reverse().join(""); @@ -3505,190 +3505,188 @@ describe("type policies", function () { }); }); - itAsync( - "can handle Relay-style pagination without args", - (resolve, reject) => { - const cache = new InMemoryCache({ - addTypename: false, - typePolicies: { - Query: { - fields: { - todos: relayStylePagination(), - }, + it("can handle Relay-style pagination without args", async () => { + const cache = new InMemoryCache({ + addTypename: false, + typePolicies: { + Query: { + fields: { + todos: relayStylePagination(), }, }, - }); + }, + }); - const firstQuery = gql` - query TodoQuery { - todos { - totalCount - } + const firstQuery = gql` + query TodoQuery { + todos { + totalCount } - `; + } + `; - const secondQuery = gql` 
- query TodoQuery { - todos(after: $after, first: $first) { - pageInfo { - __typename - hasNextPage - endCursor - } - totalCount - edges { + const secondQuery = gql` + query TodoQuery { + todos(after: $after, first: $first) { + pageInfo { + __typename + hasNextPage + endCursor + } + totalCount + edges { + __typename + id + node { __typename id - node { - __typename - id - title - } + title } } } - `; + } + `; - const thirdQuery = gql` - query TodoQuery { - todos { - totalCount - extraMetaData - } + const thirdQuery = gql` + query TodoQuery { + todos { + totalCount + extraMetaData } - `; + } + `; - const secondVariables = { - first: 1, - }; + const secondVariables = { + first: 1, + }; - const secondEdges = [ - { - __typename: "TodoEdge", - id: "edge1", - node: { - __typename: "Todo", - id: "1", - title: "Fix the tests", - }, + const secondEdges = [ + { + __typename: "TodoEdge", + id: "edge1", + node: { + __typename: "Todo", + id: "1", + title: "Fix the tests", }, - ]; + }, + ]; - const secondPageInfo = { - __typename: "PageInfo", - endCursor: "YXJyYXljb25uZWN0aW9uOjI=", - hasNextPage: true, - }; + const secondPageInfo = { + __typename: "PageInfo", + endCursor: "YXJyYXljb25uZWN0aW9uOjI=", + hasNextPage: true, + }; - const link = new MockLink([ - { - request: { - query: firstQuery, - }, - result: { - data: { - todos: { - totalCount: 1292, - }, - }, - }, + const link = new MockLink([ + { + request: { + query: firstQuery, }, - { - request: { - query: secondQuery, - variables: secondVariables, - }, - result: { - data: { - todos: { - edges: secondEdges, - pageInfo: secondPageInfo, - totalCount: 1292, - }, + result: { + data: { + todos: { + totalCount: 1292, }, }, }, - { - request: { - query: thirdQuery, - }, - result: { - data: { - todos: { - totalCount: 1293, - extraMetaData: "extra", - }, - }, - }, + }, + { + request: { + query: secondQuery, + variables: secondVariables, }, - ]).setOnError(reject); - - const client = new ApolloClient({ link, cache }); - - client.query({ query: firstQuery }).then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, + result: { data: { todos: { + edges: secondEdges, + pageInfo: secondPageInfo, totalCount: 1292, }, }, - }); - - expect(cache.extract()).toEqual({ - ROOT_QUERY: { - __typename: "Query", + }, + }, + { + request: { + query: thirdQuery, + }, + result: { + data: { todos: { - edges: [], - pageInfo: { - endCursor: "", - hasNextPage: true, - hasPreviousPage: false, - startCursor: "", - }, - totalCount: 1292, + totalCount: 1293, + extraMetaData: "extra", }, }, - }); + }, + }, + ]).setOnError((error) => { + throw new Error(error); + }); - client - .query({ query: secondQuery, variables: secondVariables }) - .then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - todos: { - edges: secondEdges, - pageInfo: secondPageInfo, - totalCount: 1292, - }, - }, - }); + const client = new ApolloClient({ link, cache }); - expect(cache.extract()).toMatchSnapshot(); + let result = await client.query({ query: firstQuery }); - client.query({ query: thirdQuery }).then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - todos: { - totalCount: 1293, - extraMetaData: "extra", - }, - }, - }); - expect(cache.extract()).toMatchSnapshot(); - resolve(); - }); - }); - }); - } - ); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + todos: { + totalCount: 1292, + }, + }, + }); + + 
expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + todos: { + edges: [], + pageInfo: { + endCursor: "", + hasNextPage: true, + hasPreviousPage: false, + startCursor: "", + }, + totalCount: 1292, + }, + }, + }); + + result = await client.query({ + query: secondQuery, + variables: secondVariables, + }); - itAsync("can handle Relay-style pagination", (resolve, reject) => { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + todos: { + edges: secondEdges, + pageInfo: secondPageInfo, + totalCount: 1292, + }, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + result = await client.query({ query: thirdQuery }); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + todos: { + totalCount: 1293, + extraMetaData: "extra", + }, + }, + }); + expect(cache.extract()).toMatchSnapshot(); + }); + + it("can handle Relay-style pagination", async () => { const cache = new InMemoryCache({ addTypename: false, typePolicies: { @@ -4065,7 +4063,9 @@ describe("type policies", function () { }, }, }, - ]).setOnError(reject); + ]).setOnError((error) => { + throw new Error(error); + }); const client = new ApolloClient({ link, cache }); @@ -4086,275 +4086,278 @@ describe("type policies", function () { }, }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: firstEdges, - pageInfo: firstPageInfo, - totalCount: 1292, - }, - }, - }); + const stream = new ObservableStream(observable); - expect(cache.extract()).toMatchSnapshot(); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: firstEdges, + pageInfo: firstPageInfo, + totalCount: 1292, + }, + }, + }); + expect(cache.extract()).toMatchSnapshot(); - observable.fetchMore({ - variables: secondVariables, - }); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: [...firstEdges, ...secondEdges], - pageInfo: { - __typename: "PageInfo", - startCursor: firstPageInfo.startCursor, - endCursor: secondPageInfo.endCursor, - hasPreviousPage: false, - hasNextPage: true, - }, - totalCount: 1292, + await observable.fetchMore({ variables: secondVariables }); + + { + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: [...firstEdges, ...secondEdges], + pageInfo: { + __typename: "PageInfo", + startCursor: firstPageInfo.startCursor, + endCursor: secondPageInfo.endCursor, + hasPreviousPage: false, + hasNextPage: true, }, + totalCount: 1292, }, - }); + }, + }); + expect(cache.extract()).toMatchSnapshot(); + } - expect(cache.extract()).toMatchSnapshot(); + await observable.fetchMore({ variables: thirdVariables }); - observable.fetchMore({ - variables: thirdVariables, - }); - } else if (count === 3) { - expect(result.data.search.edges.length).toBe(5); + { + const result = await stream.takeNext(); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: [...thirdEdges, ...secondEdges], - pageInfo: { - __typename: "PageInfo", - startCursor: thirdPageInfo.startCursor, - endCursor: secondPageInfo.endCursor, - hasPreviousPage: true, - hasNextPage: true, - }, - totalCount: 1292, + expect(result.data.search.edges.length).toBe(5); 
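+ // After fetching with thirdVariables, thirdEdges are merged in ahead of secondEdges and hasPreviousPage flips to true: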
+ + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: [...thirdEdges, ...secondEdges], + pageInfo: { + __typename: "PageInfo", + startCursor: thirdPageInfo.startCursor, + endCursor: secondPageInfo.endCursor, + hasPreviousPage: true, + hasNextPage: true, }, + totalCount: 1292, }, - }); + }, + }); - expect(cache.extract()).toMatchSnapshot(); + expect(cache.extract()).toMatchSnapshot(); + } - observable.fetchMore({ - variables: fourthVariables, - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: [...fourthEdges, ...thirdEdges, ...secondEdges], - pageInfo: { - __typename: "PageInfo", - startCursor: firstPageInfo.startCursor, - endCursor: secondPageInfo.endCursor, - hasPreviousPage: false, - hasNextPage: true, - }, - totalCount: 1292, + await observable.fetchMore({ variables: fourthVariables }); + + { + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: [...fourthEdges, ...thirdEdges, ...secondEdges], + pageInfo: { + __typename: "PageInfo", + startCursor: firstPageInfo.startCursor, + endCursor: secondPageInfo.endCursor, + hasPreviousPage: false, + hasNextPage: true, }, + totalCount: 1292, }, - }); + }, + }); - expect(result.data.search.edges).toEqual([ - ...firstEdges, - ...secondEdges, - ]); + expect(result.data.search.edges).toEqual([ + ...firstEdges, + ...secondEdges, + ]); - expect(cache.extract()).toMatchSnapshot(); + expect(cache.extract()).toMatchSnapshot(); + } - observable.fetchMore({ - variables: fifthVariables, - }); - } else if (count === 5) { - expect(result.data.search.edges.length).toBe(7); + await observable.fetchMore({ variables: fifthVariables }); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: [...firstEdges, ...secondEdges, ...fifthEdges], - pageInfo: { - __typename: "PageInfo", - startCursor: firstPageInfo.startCursor, - endCursor: fifthPageInfo.endCursor, - hasPreviousPage: false, - hasNextPage: true, - }, - totalCount: 1292, + { + const result = await stream.takeNext(); + + expect(result.data.search.edges.length).toBe(7); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: [...firstEdges, ...secondEdges, ...fifthEdges], + pageInfo: { + __typename: "PageInfo", + startCursor: firstPageInfo.startCursor, + endCursor: fifthPageInfo.endCursor, + hasPreviousPage: false, + hasNextPage: true, }, + totalCount: 1292, }, - }); + }, + }); - expect(cache.extract()).toMatchSnapshot(); - - // Now search for a different artist to verify that they keyArgs - // function we passed to relayStylePagination above keeps - // different search queries separate in the cache. - client - .query({ - query, - variables: { - query: "James Turrell", - first: 1, - }, - }) - .then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: turrellEdges.slice(0, 1), - pageInfo: turrellPageInfo1, - totalCount: 13531, - }, - }, - }); + expect(cache.extract()).toMatchSnapshot(); + } - const snapshot = cache.extract(); - expect(snapshot).toMatchSnapshot(); - expect( - // Note that Turrell's name has been lower-cased. 
- snapshot.ROOT_QUERY!["search:james turrell"] - ).toEqual({ - edges: turrellEdges.slice(0, 1).map((edge) => ({ - ...edge, - // The relayStylePagination merge function updates the - // edge.cursor field of the first and last edge, even if - // the query did not request the edge.cursor field, if - // pageInfo.{start,end}Cursor are defined. - cursor: turrellPageInfo1.startCursor, - // Artist objects are normalized by HREF: - node: { __ref: 'Artist:{"href":"/artist/james-turrell"}' }, - })), - pageInfo: turrellPageInfo1, - totalCount: 13531, - }); + // Now search for a different artist to verify that they keyArgs + // function we passed to relayStylePagination above keeps + // different search queries separate in the cache. + { + const result = await client.query({ + query, + variables: { + query: "James Turrell", + first: 1, + }, + }); + const snapshot = cache.extract(); - // Evict the Basquiat entity to verify that the dangling - // edge.node Reference gets automatically elided from the - // Basquiat search results, thanks to the read function - // generated by the relayStylePagination helper. - expect( - cache.evict({ - id: cache.identify({ - __typename: "Artist", - href: "/artist/jean-michel-basquiat", - }), - }) - ).toBe(true); - }, reject); - } else if (count === 6) { - // Same full list of edges that we saw in the previous case. - const edges = [...firstEdges, ...secondEdges, ...fifthEdges]; - - // Remove the Basquiat edge, which we know to be first. - expect(edges.shift()).toEqual({ - __typename: "SearchableEdge", - node: { - __typename: "Artist", - href: "/artist/jean-michel-basquiat", - displayLabel: "Jean-Michel Basquiat", - bio: "American, 1960-1988, New York, New York, based in New York, New York", + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: turrellEdges.slice(0, 1), + pageInfo: turrellPageInfo1, + totalCount: 13531, }, - }); + }, + }); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges, - pageInfo: { - __typename: "PageInfo", - startCursor: fourthPageInfo.startCursor, - endCursor: fifthPageInfo.endCursor, - hasPreviousPage: false, - hasNextPage: true, - }, - totalCount: 1292, + expect(snapshot).toMatchSnapshot(); + expect( + // Note that Turrell's name has been lower-cased. + snapshot.ROOT_QUERY!["search:james turrell"] + ).toEqual({ + edges: turrellEdges.slice(0, 1).map((edge) => ({ + ...edge, + // The relayStylePagination merge function updates the + // edge.cursor field of the first and last edge, even if + // the query did not request the edge.cursor field, if + // pageInfo.{start,end}Cursor are defined. + cursor: turrellPageInfo1.startCursor, + // Artist objects are normalized by HREF: + node: { __ref: 'Artist:{"href":"/artist/james-turrell"}' }, + })), + pageInfo: turrellPageInfo1, + totalCount: 13531, + }); + } + + // Evict the Basquiat entity to verify that the dangling + // edge.node Reference gets automatically elided from the + // Basquiat search results, thanks to the read function + // generated by the relayStylePagination helper. + expect( + cache.evict({ + id: cache.identify({ + __typename: "Artist", + href: "/artist/jean-michel-basquiat", + }), + }) + ).toBe(true); + + { + const result = await stream.takeNext(); + + // Same full list of edges that we saw in the previous case. + const edges = [...firstEdges, ...secondEdges, ...fifthEdges]; + + // Remove the Basquiat edge, which we know to be first. 
+ expect(edges.shift()).toEqual({ + __typename: "SearchableEdge", + node: { + __typename: "Artist", + href: "/artist/jean-michel-basquiat", + displayLabel: "Jean-Michel Basquiat", + bio: "American, 1960-1988, New York, New York, based in New York, New York", + }, + }); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges, + pageInfo: { + __typename: "PageInfo", + startCursor: fourthPageInfo.startCursor, + endCursor: fifthPageInfo.endCursor, + hasPreviousPage: false, + hasNextPage: true, }, + totalCount: 1292, }, - }); + }, + }); - expect(cache.extract()).toMatchSnapshot(); - - // Now search for James Turrell again with args.first === 2 - // (turrellVariables2), but without args.after, so that the - // new results overwrite the existing results (#6592). - client - .query({ - query, - variables: turrellVariables2, - // Necessary to skip the cache, like fetchMore does. - fetchPolicy: "network-only", - }) - .then((result) => { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - search: { - edges: turrellEdges, - pageInfo: turrellPageInfo2, - totalCount: 13531, - }, - }, - }); + expect(cache.extract()).toMatchSnapshot(); + } - const snapshot = cache.extract(); - expect(snapshot).toMatchSnapshot(); - expect( - // Note that Turrell's name has been lower-cased. - snapshot.ROOT_QUERY!["search:james turrell"] - ).toEqual({ - edges: turrellEdges.map((edge, i) => ({ - ...edge, - // This time the cursors are different depending on which - // of the two edges we're considering. - cursor: [ - turrellPageInfo2.startCursor, - turrellPageInfo2.endCursor, - ][i], - node: [ - // Artist objects are normalized by HREF: - { __ref: 'Artist:{"href":"/artist/james-turrell"}' }, - // However, SearchableItem objects are not normalized. - edge.node, - ][i], - })), - pageInfo: turrellPageInfo2, - totalCount: 13531, - }); + { + // Now search for James Turrell again with args.first === 2 + // (turrellVariables2), but without args.after, so that the + // new results overwrite the existing results (#6592). + const result = await client.query({ + query, + variables: turrellVariables2, + // Necessary to skip the cache, like fetchMore does. + fetchPolicy: "network-only", + }); + const snapshot = cache.extract(); - // Wait a bit to make sure there are no additional results for - // Basquiat. - setTimeout(resolve, 100); - }); - } else { - reject("should not receive another result for Basquiat"); - } - }); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + search: { + edges: turrellEdges, + pageInfo: turrellPageInfo2, + totalCount: 13531, + }, + }, + }); + + expect(snapshot).toMatchSnapshot(); + expect( + // Note that Turrell's name has been lower-cased. + snapshot.ROOT_QUERY!["search:james turrell"] + ).toEqual({ + edges: turrellEdges.map((edge, i) => ({ + ...edge, + // This time the cursors are different depending on which + // of the two edges we're considering. + cursor: [turrellPageInfo2.startCursor, turrellPageInfo2.endCursor][ + i + ], + node: [ + // Artist objects are normalized by HREF: + { __ref: 'Artist:{"href":"/artist/james-turrell"}' }, + // However, SearchableItem objects are not normalized. 
+ edge.node, + ][i], + })), + pageInfo: turrellPageInfo2, + totalCount: 13531, + }); + } + + await expect(stream).not.toEmitAnything(); }); it("runs nested merge functions as well as ancestors", function () { @@ -5152,7 +5155,7 @@ describe("type policies", function () { expect(personMergeCount).toBe(3); }); - it("can force merging references with non-normalized objects", function () { + it("can force merging references with non-normalized objects", async function () { const nameQuery = gql` query GetName { viewer { @@ -5170,7 +5173,7 @@ describe("type policies", function () { } `; - check( + await check( new InMemoryCache({ typePolicies: { Query: { @@ -5184,7 +5187,7 @@ describe("type policies", function () { }) ); - check( + await check( new InMemoryCache({ typePolicies: { User: { @@ -5194,7 +5197,7 @@ describe("type policies", function () { }) ); - function check(cache: InMemoryCache) { + async function check(cache: InMemoryCache) { // Write nameQuery first, so the existing data will be a // non-normalized object when we write emailQuery next. cache.writeQuery({ @@ -5266,7 +5269,7 @@ describe("type policies", function () { }, }); - cache.reset(); + await cache.reset(); expect(cache.extract()).toEqual({}); // Write emailQuery first, so the existing data will be a diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts index 3b639e14b58..96ef174df4b 100644 --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -21,23 +21,21 @@ import { InMemoryCache, NormalizedCacheObject } from "../../cache"; import { ApolloError } from "../../errors"; import { - itAsync, MockLink, mockSingleLink, MockSubscriptionLink, - subscribeAndCount, + tick, wait, } from "../../testing"; import mockQueryManager, { getDefaultOptionsForQueryManagerTests, } from "../../testing/core/mocking/mockQueryManager"; import mockWatchQuery from "../../testing/core/mocking/mockWatchQuery"; -import wrap from "../../testing/core/wrap"; import { resetStore } from "./QueryManager"; import { SubscriptionObserver } from "zen-observable-ts"; import { waitFor } from "@testing-library/react"; -import { ObservableStream } from "../../testing/internal"; +import { ObservableStream, spyOnConsole } from "../../testing/internal"; export const mockFetchQuery = (queryManager: QueryManager) => { const fetchConcastWithInfo = queryManager["fetchConcastWithInfo"]; @@ -112,80 +110,50 @@ describe("ObservableQuery", () => { describe("setOptions", () => { describe("to change pollInterval", () => { - itAsync( - "starts polling if goes from 0 -> something", - (resolve, reject) => { - const manager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); + it("starts polling if goes from 0 -> something", async () => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); - const observable = manager.watchQuery({ - query, - variables, - notifyOnNetworkStatusChange: false, - }); + const observable = manager.watchQuery({ + query, + variables, + notifyOnNetworkStatusChange: false, + }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - 
observable.setOptions({ query, pollInterval: 10 }); - } else if (handleCount === 2) { - expect(result.data).toEqual(dataTwo); - observable.stopPolling(); - resolve(); - } - }); + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataOne); } - ); - itAsync( - "stops polling if goes from something -> 0", - (resolve, reject) => { - const manager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); + await observable.setOptions({ query, pollInterval: 10 }); - const observable = manager.watchQuery({ - query, - variables, - pollInterval: 10, - }); + { + const { data } = await stream.takeNext(); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - observable.setOptions({ query, pollInterval: 0 }); - setTimeout(resolve, 5); - } else if (handleCount === 2) { - reject(new Error("Should not get more than one result")); - } - }); + expect(data).toEqual(dataTwo); } - ); - itAsync("can change from x>0 to y>0", (resolve, reject) => { + observable.stopPolling(); + + await expect(stream).not.toEmitAnything(); + }); + + it("stops polling if goes from something -> 0", async () => { const manager = mockQueryManager( { request: { query, variables }, @@ -204,24 +172,68 @@ describe("ObservableQuery", () => { const observable = manager.watchQuery({ query, variables, - pollInterval: 100, - notifyOnNetworkStatusChange: false, + pollInterval: 10, }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - observable.setOptions({ query, pollInterval: 10 }); - } else if (handleCount === 2) { - expect(result.data).toEqual(dataTwo); - observable.stopPolling(); - resolve(); + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataOne); + } + + await observable.setOptions({ query, pollInterval: 0 }); + + await expect(stream).not.toEmitAnything(); + }); + + it("can change from x>0 to y>0", async () => { + const manager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, } + ); + + const observable = manager.watchQuery({ + query, + variables, + pollInterval: 100, + notifyOnNetworkStatusChange: false, }); + + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataOne); + } + + await observable.setOptions({ query, pollInterval: 10 }); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataTwo); + } + + observable.stopPolling(); + + await expect(stream).not.toEmitAnything(); }); }); - itAsync("does not break refetch", (resolve, reject) => { + it("does not break refetch", async () => { // This query and variables are copied from react-apollo const queryWithVars = gql` query people($first: Int) { @@ -262,23 +274,35 @@ describe("ObservableQuery", () => { notifyOnNetworkStatusChange: true, }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - return 
observable.refetch(variables2); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(data2); - resolve(); - } - }); + const stream = new ObservableStream(observable); + + { + const { data, loading } = await stream.takeNext(); + + expect(data).toEqual(data); + expect(loading).toBe(false); + } + + await observable.refetch(variables2); + + { + const { loading, networkStatus } = await stream.takeNext(); + + expect(loading).toBe(true); + expect(networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const { data, loading } = await stream.takeNext(); + + expect(loading).toBe(false); + expect(data).toEqual(data2); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync("rerenders when refetch is called", (resolve, reject) => { + it("rerenders when refetch is called", async () => { // This query and variables are copied from react-apollo const query = gql` query people($first: Int) { @@ -318,400 +342,417 @@ describe("ObservableQuery", () => { notifyOnNetworkStatusChange: true, }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toEqual(false); - expect(result.data).toEqual(data); - return observable.refetch(); - } else if (handleCount === 2) { - expect(result.loading).toEqual(true); - expect(result.networkStatus).toEqual(NetworkStatus.refetch); - } else if (handleCount === 3) { - expect(result.loading).toEqual(false); - expect(result.data).toEqual(data2); - resolve(); - } - }); - }); + const stream = new ObservableStream(observable); - itAsync( - "rerenders with new variables then shows correct data for previous variables", - (resolve, reject) => { - // This query and variables are copied from react-apollo - const query = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name - } - } - } - `; + { + const result = await stream.takeNext(); - const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; - const variables = { first: 0 }; + expect(result.loading).toEqual(false); + expect(result.data).toEqual(data); + } - const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; - const variables2 = { first: 1 }; + await observable.refetch(); - const observable: ObservableQuery = mockWatchQuery( - { - request: { - query, - variables, - }, - result: { data }, - }, - { - request: { - query, - variables: variables2, - }, - result: { data: data2 }, - } - ); + { + const { loading, networkStatus } = await stream.takeNext(); - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - await observable.setOptions({ - variables: variables2, - notifyOnNetworkStatusChange: true, - }); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(data2); - // go back to first set of variables - const current = await observable.reobserve({ variables }); - expect(current.data).toEqual(data); - resolve(); - } - }); + expect(loading).toEqual(true); + expect(networkStatus).toEqual(NetworkStatus.refetch); } - ); - // TODO: Something isn't quite right with this test. It's failing but not - // for the right reasons. 
- itAsync.skip( - "if query is refetched, and an error is returned, no other observer callbacks will be called", - (resolve) => { - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { errors: [error] }, - }, - { - request: { query, variables }, - result: { data: dataOne }, - } - ); + { + const result = await stream.takeNext(); - let handleCount = 0; - observable.subscribe({ - next: (result) => { - handleCount++; - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - observable.refetch(); - } else if (handleCount === 3) { - throw new Error("next shouldn't fire after an error"); - } - }, - error: () => { - handleCount++; - expect(handleCount).toBe(2); - observable.refetch(); - setTimeout(resolve, 25); - }, - }); + expect(result.loading).toEqual(false); + expect(result.data).toEqual(data2); } - ); - - itAsync( - "does a network request if fetchPolicy becomes networkOnly", - (resolve, reject) => { - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataOne); - return observable.setOptions({ fetchPolicy: "network-only" }); - } else if (handleCount === 2) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataTwo); - resolve(); - } - }); - } - ); + await expect(stream).not.toEmitAnything(); + }); - itAsync( - "does a network request if fetchPolicy is cache-only then store is reset then fetchPolicy becomes not cache-only", - (resolve, reject) => { - const testQuery = gql` - query { - author { - firstName - lastName + it("rerenders with new variables then shows correct data for previous variables", async () => { + // This query and variables are copied from react-apollo + const query = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name } } - `; - const data = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; - - let timesFired = 0; - const link: ApolloLink = ApolloLink.from([ - () => - new Observable((observer) => { - timesFired += 1; - observer.next({ data }); - observer.complete(); - }), - ]); - - const queryManager = createQueryManager({ link }); - // fetch first data from server - const observable = queryManager.watchQuery({ - query: testQuery, - }); + } + `; - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(timesFired).toBe(1); - // set policy to be cache-only but data is found - await observable.setOptions({ fetchPolicy: "cache-only" }); - await resetStore(queryManager); - } else if (handleCount === 2) { - expect(result.data).toEqual({}); - expect(result.loading).toBe(false); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(timesFired).toBe(1); - resolve(); - } - }); - } - ); + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const variables = { first: 0 }; - itAsync( - "does a network request if fetchPolicy changes from cache-only", - (resolve, reject) => { - const testQuery = gql` - query { - author { - firstName - lastName - } - } - `; - const data = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; + const data2 = { allPeople: { people: 
[{ name: "Leia Skywalker" }] } }; + const variables2 = { first: 1 }; - let timesFired = 0; - const link: ApolloLink = ApolloLink.from([ - () => { - return new Observable((observer) => { - timesFired += 1; - observer.next({ data }); - observer.complete(); - }); + const observable: ObservableQuery = mockWatchQuery( + { + request: { + query, + variables, }, - ]); + result: { data }, + }, + { + request: { + query, + variables: variables2, + }, + result: { data: data2 }, + } + ); - const queryManager = createQueryManager({ link }); + const stream = new ObservableStream(observable); - const observable = queryManager.watchQuery({ - query: testQuery, - fetchPolicy: "cache-only", - notifyOnNetworkStatusChange: false, - }); + { + const result = await stream.takeNext(); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({}); - expect(timesFired).toBe(0); - observable.setOptions({ fetchPolicy: "cache-first" }); - } else if (handleCount === 2) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(data); - expect(timesFired).toBe(1); - resolve(); - } - }); + expect(result.data).toEqual(data); + expect(result.loading).toBe(false); } - ); - itAsync( - "can set queries to standby and will not fetch when doing so", - (resolve, reject) => { - let queryManager: QueryManager; - let observable: ObservableQuery; - const testQuery = gql` - query { - author { - firstName - lastName - } - } - `; - const data = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; + await observable.setOptions({ + variables: variables2, + notifyOnNetworkStatusChange: true, + }); - let timesFired = 0; - const link: ApolloLink = ApolloLink.from([ - () => { - return new Observable((observer) => { - timesFired += 1; - observer.next({ data }); - observer.complete(); - return; - }); - }, - ]); - queryManager = createQueryManager({ link }); - observable = queryManager.watchQuery({ - query: testQuery, - fetchPolicy: "cache-first", - notifyOnNetworkStatusChange: false, - }); + { + const result = await stream.takeNext(); - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(timesFired).toBe(1); - await observable.setOptions({ query, fetchPolicy: "standby" }); - // make sure the query didn't get fired again. 
- expect(timesFired).toBe(1); - resolve(); - } else if (handleCount === 2) { - throw new Error("Handle should not be triggered on standby query"); - } - }); + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); } - ); - itAsync( - "will not fetch when setting a cache-only query to standby", - (resolve, reject) => { - let queryManager: QueryManager; - let observable: ObservableQuery; - const testQuery = gql` - query { - author { - firstName - lastName - } - } - `; - const data = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; + { + const result = await stream.takeNext(); - let timesFired = 0; - const link: ApolloLink = ApolloLink.from([ - () => { - return new Observable((observer) => { - timesFired += 1; - observer.next({ data }); - observer.complete(); - return; - }); - }, - ]); - queryManager = createQueryManager({ link }); - - queryManager.query({ query: testQuery }).then(() => { - observable = queryManager.watchQuery({ - query: testQuery, - fetchPolicy: "cache-first", - notifyOnNetworkStatusChange: false, - }); + expect(result.loading).toBe(false); + expect(result.data).toEqual(data2); + } - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(timesFired).toBe(1); - await observable.setOptions({ query, fetchPolicy: "standby" }); - // make sure the query didn't get fired again. - expect(timesFired).toBe(1); - resolve(); - } else if (handleCount === 2) { - throw new Error( - "Handle should not be triggered on standby query" - ); - } + // go back to first set of variables + const current = await observable.reobserve({ variables }); + expect(current.data).toEqual(data); + }); + + it("if query is refetched, and an error is returned, no other observer callbacks will be called", async () => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { errors: [error] }, + }, + { + request: { query, variables }, + result: { data: dataOne }, + } + ); + + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataOne); + } + + await observable.refetch().catch(() => {}); + + await stream.takeError(); + + await observable.refetch(); + + await expect(stream).not.toEmitAnything(); + }); + + it("does a network request if fetchPolicy becomes networkOnly", async () => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); + + const stream = new ObservableStream(observable); + + { + const { data, loading } = await stream.takeNext(); + + expect(loading).toEqual(false); + expect(data).toEqual(dataOne); + } + + await observable.setOptions({ fetchPolicy: "network-only" }); + + { + const { data, loading } = await stream.takeNext(); + + expect(loading).toEqual(false); + expect(data).toEqual(dataTwo); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("does a network request if fetchPolicy is cache-only then store is reset then fetchPolicy becomes not cache-only", async () => { + const testQuery = gql` + query { + author { + firstName + lastName + } + } + `; + const data = { + author: { + firstName: "John", + lastName: "Smith", + }, + }; + + let timesFired = 0; + const link: ApolloLink = ApolloLink.from([ + () => + new Observable((observer) => { + 
timesFired += 1; + observer.next({ data }); + observer.complete(); + }), + ]); + + const queryManager = createQueryManager({ link }); + // fetch first data from server + const observable = queryManager.watchQuery({ + query: testQuery, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(timesFired).toBe(1); + } + + await observable.setOptions({ fetchPolicy: "cache-only" }); + await resetStore(queryManager); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual({}); + expect(result.loading).toBe(false); + expect(result.networkStatus).toBe(NetworkStatus.ready); + expect(timesFired).toBe(1); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("does a network request if fetchPolicy changes from cache-only", async () => { + const testQuery = gql` + query { + author { + firstName + lastName + } + } + `; + const data = { + author: { + firstName: "John", + lastName: "Smith", + }, + }; + + let timesFired = 0; + const link: ApolloLink = ApolloLink.from([ + () => { + return new Observable((observer) => { + timesFired += 1; + observer.next({ data }); + observer.complete(); }); - }); + }, + ]); + + const queryManager = createQueryManager({ link }); + + const observable = queryManager.watchQuery({ + query: testQuery, + fetchPolicy: "cache-only", + notifyOnNetworkStatusChange: false, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual({}); + expect(timesFired).toBe(0); } - ); - itAsync( - "returns a promise which eventually returns data", - (resolve, reject) => { - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, + await observable.setOptions({ fetchPolicy: "cache-first" }); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual(data); + expect(timesFired).toBe(1); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("can set queries to standby and will not fetch when doing so", async () => { + let queryManager: QueryManager; + let observable: ObservableQuery; + const testQuery = gql` + query { + author { + firstName + lastName } - ); + } + `; + const data = { + author: { + firstName: "John", + lastName: "Smith", + }, + }; + + let timesFired = 0; + const link: ApolloLink = ApolloLink.from([ + () => { + return new Observable((observer) => { + timesFired += 1; + observer.next({ data }); + observer.complete(); + return; + }); + }, + ]); + queryManager = createQueryManager({ link }); + observable = queryManager.watchQuery({ + query: testQuery, + fetchPolicy: "cache-first", + notifyOnNetworkStatusChange: false, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(timesFired).toBe(1); + } - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - observable - .setOptions({ - fetchPolicy: "cache-and-network", - }) - .then((res) => { - expect(res.data).toEqual(dataTwo); - }) - .then(resolve, reject); + await observable.setOptions({ query, fetchPolicy: "standby" }); + // make sure the query didn't get fired again. 
+ expect(timesFired).toBe(1); + + await expect(stream).not.toEmitAnything(); + }); + + it("will not fetch when setting a cache-only query to standby", async () => { + let queryManager: QueryManager; + let observable: ObservableQuery; + const testQuery = gql` + query { + author { + firstName + lastName } - }); + } + `; + const data = { + author: { + firstName: "John", + lastName: "Smith", + }, + }; + + let timesFired = 0; + const link: ApolloLink = ApolloLink.from([ + () => { + return new Observable((observer) => { + timesFired += 1; + observer.next({ data }); + observer.complete(); + return; + }); + }, + ]); + queryManager = createQueryManager({ link }); + + await queryManager.query({ query: testQuery }); + + observable = queryManager.watchQuery({ + query: testQuery, + fetchPolicy: "cache-first", + notifyOnNetworkStatusChange: false, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(timesFired).toBe(1); } - ); + + await observable.setOptions({ query, fetchPolicy: "standby" }); + // make sure the query didn't get fired again. + expect(timesFired).toBe(1); + + await expect(stream).not.toEmitAnything(); + }); + + it("returns a promise which eventually returns data", async () => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); + + const stream = new ObservableStream(observable); + + const { data } = await stream.takeNext(); + + expect(data).toEqual(dataOne); + + const res = await observable.setOptions({ + fetchPolicy: "cache-and-network", + }); + + expect(res.data).toEqual(dataTwo); + }); }); describe("setVariables", () => { - itAsync("reruns query if the variables change", (resolve, reject) => { + it("reruns query if the variables change", async () => { const queryManager = mockQueryManager( { request: { query, variables }, @@ -729,1679 +770,1670 @@ describe("ObservableQuery", () => { notifyOnNetworkStatusChange: true, }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataOne); - return observable.setVariables(differentVariables); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataTwo); - resolve(); + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual(dataOne); + } + + await observable.setVariables(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual(dataTwo); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("does invalidate the currentResult data if the variables change", async () => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + delay: 25, } - }); + ); + + const stream = new ObservableStream(observable); + + { + const result = await 
stream.takeNext(); + + expect(result.data).toEqual(dataOne); + expect(observable.getCurrentResult().data).toEqual(dataOne); + } + + await observable.setVariables(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result.loading).toEqual(false); + expect(result.data).toEqual(dataTwo); + expect(observable.getCurrentResult().data).toEqual(dataTwo); + expect(observable.getCurrentResult().loading).toBe(false); + } + + await expect(stream).not.toEmitAnything(); }); - itAsync( - "does invalidate the currentResult data if the variables change", - (resolve, reject) => { - const observable: ObservableQuery = mockWatchQuery( + it("does invalidate the currentResult data if the variables change", async () => { + // Standard data for all these tests + const query = gql` + query UsersQuery($page: Int) { + users { + id + name + posts(page: $page) { + title + } + } + } + `; + const variables = { page: 1 }; + const differentVariables = { page: 2 }; + const dataOne = { + users: [ { - request: { query, variables }, - result: { data: dataOne }, + id: 1, + name: "James", + posts: [{ title: "GraphQL Summit" }, { title: "Awesome" }], }, + ], + }; + const dataTwo = { + users: [ { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - delay: 25, - } - ); + id: 1, + name: "James", + posts: [{ title: "Old post" }], + }, + ], + }; + + const observable: ObservableQuery = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + delay: 25, + } + ); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(dataOne); + expect(observable.getCurrentResult().data).toEqual(dataOne); + } + + await observable.setVariables(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(dataTwo); + expect(observable.getCurrentResult().data).toEqual(dataTwo); + expect(observable.getCurrentResult().loading).toBe(false); + } + + await expect(stream).not.toEmitAnything(); + }); - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - expect(observable.getCurrentResult().data).toEqual(dataOne); - await observable.setVariables(differentVariables); + it("does not invalidate the currentResult errors if the variables change", async () => { + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { errors: [error] }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + } + ); + + const observable = queryManager.watchQuery({ + query, + variables, + errorPolicy: "all", + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.errors).toEqual([error]); + expect(observable.getCurrentResult().errors).toEqual([error]); + } + + await observable.setVariables(differentVariables); + expect(observable.getCurrentResult().errors).toBeUndefined(); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(dataTwo); + expect(observable.getCurrentResult().data).toEqual(dataTwo); + expect(observable.getCurrentResult().loading).toBe(false); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("does not perform a query when unsubscribed if variables change", async () => { + // Note: no responses, will throw if a query is made 
+ const queryManager = mockQueryManager(); + const observable = queryManager.watchQuery({ query, variables }); + + await observable.setVariables(differentVariables); + }); + + it("sets networkStatus to `setVariables` when fetching", async () => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; + + const queryManager = mockQueryManager(...mockedResponses); + const firstRequest = mockedResponses[0].request; + const observable = queryManager.watchQuery({ + query: firstRequest.query, + variables: firstRequest.variables, + notifyOnNetworkStatusChange: true, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual(dataOne); + expect(result.networkStatus).toBe(NetworkStatus.ready); + } + + await observable.setVariables(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.networkStatus).toBe(NetworkStatus.ready); + expect(result.data).toEqual(dataTwo); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("sets networkStatus to `setVariables` when calling refetch with new variables", async () => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; + + const queryManager = mockQueryManager(...mockedResponses); + const firstRequest = mockedResponses[0].request; + const observable = queryManager.watchQuery({ + query: firstRequest.query, + variables: firstRequest.variables, + notifyOnNetworkStatusChange: true, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.data).toEqual(dataOne); + expect(result.networkStatus).toBe(NetworkStatus.ready); + } + + await observable.refetch(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.networkStatus).toBe(NetworkStatus.ready); + expect(result.data).toEqual(dataTwo); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("does not rerun query if variables do not change", async () => { + const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); + + const stream = new ObservableStream(observable); + + const result = await stream.takeNext(); + + expect(result.data).toEqual(dataOne); + + await observable.setVariables(variables); + + await expect(stream).not.toEmitAnything(); + }); + + it("handles variables changing while a query is in-flight", async () => { + // The expected behavior is that the original variables are forgotten + // and the query stays in loading state until the result for the new variables + // has returned. 
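+ // Both mocked responses are delayed, so the stream below should emit exactly once: the settled result for differentVariables.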
+ const observable = mockWatchQuery( + { + request: { query, variables }, + result: { data: dataOne }, + delay: 20, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + delay: 20, + } + ); + + const stream = new ObservableStream(observable); + + await observable.setVariables(differentVariables); + + const result = await stream.takeNext(); + + expect(result.networkStatus).toBe(NetworkStatus.ready); + expect(result.loading).toBe(false); + expect(result.data).toEqual(dataTwo); + + await expect(stream).not.toEmitAnything(); + }); + }); + + describe("refetch", () => { + it("calls fetchRequest with fetchPolicy `network-only` when using a non-networked fetch policy", async () => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; + + const queryManager = mockQueryManager(...mockedResponses); + const firstRequest = mockedResponses[0].request; + const observable = queryManager.watchQuery({ + query: firstRequest.query, + variables: firstRequest.variables, + fetchPolicy: "cache-first", + }); + + const mocks = mockFetchQuery(queryManager); + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataOne, + }); + } + + await observable.refetch(differentVariables); + + { + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataTwo, + }); + + const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; + expect(fqbpCalls.length).toBe(2); + expect(fqbpCalls[0][1].fetchPolicy).toEqual("cache-first"); + expect(fqbpCalls[1][1].fetchPolicy).toEqual("network-only"); + + const fqoCalls = mocks.fetchConcastWithInfo.mock.calls; + expect(fqoCalls.length).toBe(2); + expect(fqoCalls[0][1].fetchPolicy).toEqual("cache-first"); + expect(fqoCalls[1][1].fetchPolicy).toEqual("network-only"); + + // Although the options.fetchPolicy we passed just now to + // fetchQueryByPolicy should have been network-only, + // observable.options.fetchPolicy should now be updated to + // cache-first, thanks to options.nextFetchPolicy. 
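+        // The same restore-after-variables-change behavior can be written out
+        // explicitly (as the "resets fetchPolicy when variables change when
+        // using nextFetchPolicy" test below does), roughly:
+        //   nextFetchPolicy(current, info) {
+        //     return info.reason === "variables-changed"
+        //       ? info.initialFetchPolicy
+        //       : current;
+        //   }
+        // This test passes no nextFetchPolicy option itself, so it exercises
+        // the default handling.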
+ expect(observable.options.fetchPolicy).toBe("cache-first"); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("calling refetch with different variables before the query itself resolved will only yield the result for the new variables", async () => { + const observers: SubscriptionObserver>[] = []; + const queryManager = new QueryManager( + getDefaultOptionsForQueryManagerTests({ + cache: new InMemoryCache(), + link: new ApolloLink((operation, forward) => { + return new Observable((observer) => { + observers.push(observer); + }); + }), + }) + ); + const observableQuery = queryManager.watchQuery({ + query, + variables: { id: 1 }, + }); + const stream = new ObservableStream(observableQuery); + + void observableQuery.refetch({ id: 2 }); + + observers[0].next({ data: dataOne }); + observers[0].complete(); + + observers[1].next({ data: dataTwo }); + observers[1].complete(); + + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataTwo, + }); + + await expect(stream).not.toEmitAnything(); + }); + + it("calling refetch multiple times with different variables will return only results for the most recent variables", async () => { + const observers: SubscriptionObserver>[] = []; + const queryManager = new QueryManager( + getDefaultOptionsForQueryManagerTests({ + cache: new InMemoryCache(), + link: new ApolloLink((operation, forward) => { + return new Observable((observer) => { + observers.push(observer); + }); + }), + }) + ); + const observableQuery = queryManager.watchQuery({ + query, + variables: { id: 1 }, + }); + const stream = new ObservableStream(observableQuery); + + observers[0].next({ data: dataOne }); + observers[0].complete(); + + { + const result = await stream.takeNext(); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataOne, + }); + } + + void observableQuery.refetch({ id: 2 }); + void observableQuery.refetch({ id: 3 }); + + observers[1].next({ data: dataTwo }); + observers[1].complete(); + + observers[2].next({ + data: { + people_one: { + name: "SomeOneElse", + }, + }, + }); + observers[2].complete(); + + { + const result = await stream.takeNext(); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + people_one: { + name: "SomeOneElse", + }, + }, + }); + } + }); + + it("calls fetchRequest with fetchPolicy `no-cache` when using `no-cache` fetch policy", async () => { + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables: differentVariables }, + result: { data: dataTwo }, + }, + ]; + + const queryManager = mockQueryManager(...mockedResponses); + const firstRequest = mockedResponses[0].request; + const observable = queryManager.watchQuery({ + query: firstRequest.query, + variables: firstRequest.variables, + fetchPolicy: "no-cache", + }); + + const mocks = mockFetchQuery(queryManager); + const stream = new ObservableStream(observable); + + await stream.takeNext(); + await observable.refetch(differentVariables); + + const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; + expect(fqbpCalls.length).toBe(2); + expect(fqbpCalls[1][1].fetchPolicy).toBe("no-cache"); + + // Unlike network-only or cache-and-network, the no-cache + // FetchPolicy does not switch to cache-first after the first + // network request. 
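+      // The assertions below therefore check that both the persisted
+      // observable.options.fetchPolicy and the per-request fetch policies
+      // recorded by the mocks stay "no-cache" after the refetch.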
+ expect(observable.options.fetchPolicy).toBe("no-cache"); + const fqoCalls = mocks.fetchConcastWithInfo.mock.calls; + expect(fqoCalls.length).toBe(2); + expect(fqoCalls[1][1].fetchPolicy).toBe("no-cache"); + }); + + it("calls ObservableQuery.next even after hitting cache", async () => { + // This query and variables are copied from react-apollo + const queryWithVars = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } + } + } + `; + + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const variables1 = { first: 0 }; + + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const variables2 = { first: 1 }; + + const queryManager = mockQueryManager( + { + request: { + query: queryWithVars, + variables: variables1, + }, + result: { data }, + }, + { + request: { + query: queryWithVars, + variables: variables2, + }, + result: { data: data2 }, + }, + { + request: { + query: queryWithVars, + variables: variables1, + }, + result: { data }, + } + ); + + const observable = queryManager.watchQuery({ + query: queryWithVars, + variables: variables1, + fetchPolicy: "cache-and-network", + notifyOnNetworkStatusChange: true, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(result.loading).toBe(false); + await observable.refetch(variables2); + } + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data2); + expect(result.loading).toBe(false); + await observable.refetch(variables1); + } + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + } + + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(result.loading).toBe(false); + } + }); + + it("resets fetchPolicy when variables change when using nextFetchPolicy", async () => { + // This query and variables are copied from react-apollo + const queryWithVars = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } + } + } + `; + + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const variables1 = { first: 0 }; + + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const variables2 = { first: 1 }; + + const queryManager = mockQueryManager( + { + request: { + query: queryWithVars, + variables: variables1, + }, + result: { data }, + }, + { + request: { + query: queryWithVars, + variables: variables2, + }, + result: { data: data2 }, + }, + { + request: { + query: queryWithVars, + variables: variables1, + }, + result: { data }, + }, + { + request: { + query: queryWithVars, + variables: variables2, + }, + result: { data: data2 }, + } + ); + + const usedFetchPolicies: WatchQueryFetchPolicy[] = []; + const observable = queryManager.watchQuery({ + query: queryWithVars, + variables: variables1, + fetchPolicy: "cache-and-network", + nextFetchPolicy(currentFetchPolicy, info) { + if (info.reason === "variables-changed") { + return info.initialFetchPolicy; + } + usedFetchPolicies.push(currentFetchPolicy); + if (info.reason === "after-fetch") { + return "cache-first"; } - expect(observable.getCurrentResult().data).toEqual(dataTwo); - expect(observable.getCurrentResult().loading).toBe(false); - resolve(); 
- }); - } - ); + return currentFetchPolicy; + }, + notifyOnNetworkStatusChange: true, + }); - itAsync( - "does invalidate the currentResult data if the variables change", - (resolve, reject) => { - // Standard data for all these tests - const query = gql` - query UsersQuery($page: Int) { - users { - id - name - posts(page: $page) { - title - } - } - } - `; - const variables = { page: 1 }; - const differentVariables = { page: 2 }; - const dataOne = { - users: [ - { - id: 1, - name: "James", - posts: [{ title: "GraphQL Summit" }, { title: "Awesome" }], - }, - ], - }; - const dataTwo = { - users: [ - { - id: 1, - name: "James", - posts: [{ title: "Old post" }], - }, - ], - }; + expect(observable.options.fetchPolicy).toBe("cache-and-network"); + expect(observable.options.initialFetchPolicy).toBe("cache-and-network"); - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - delay: 25, - } - ); + const stream = new ObservableStream(observable); - subscribeAndCount(reject, observable, async (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(dataOne); - expect(observable.getCurrentResult().data).toEqual(dataOne); - await observable.setVariables(differentVariables); - } - expect(observable.getCurrentResult().data).toEqual(dataTwo); - expect(observable.getCurrentResult().loading).toBe(false); - resolve(); - }); + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync( - "does not invalidate the currentResult errors if the variables change", - (resolve, reject) => { - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { errors: [error] }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - } - ); + await observable.refetch(variables2); - const observable = queryManager.watchQuery({ - query, - variables, - errorPolicy: "all", - }); + { + const result = await stream.takeNext(); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.errors).toEqual([error]); - expect(observable.getCurrentResult().errors).toEqual([error]); - observable.setVariables(differentVariables); - expect(observable.getCurrentResult().errors).toBeUndefined(); - } else if (handleCount === 2) { - expect(result.data).toEqual(dataTwo); - expect(observable.getCurrentResult().data).toEqual(dataTwo); - expect(observable.getCurrentResult().loading).toBe(false); - resolve(); - } - }); + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync( - "does not perform a query when unsubscribed if variables change", - (resolve, reject) => { - // Note: no responses, will throw if a query is made - const queryManager = mockQueryManager(); - const observable = queryManager.watchQuery({ query, variables }); - return observable - .setVariables(differentVariables) - .then(resolve, reject); - } - ); + { + const result = await stream.takeNext(); - itAsync( - "sets networkStatus to `setVariables` when fetching", - (resolve, reject) => { - const mockedResponses = [ - { - request: { query, variables }, 
- result: { data: dataOne }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }, - ]; + expect(result.data).toEqual(data2); + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); + } - const queryManager = mockQueryManager(...mockedResponses); - const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ - query: firstRequest.query, - variables: firstRequest.variables, - notifyOnNetworkStatusChange: true, - }); + { + const result = await observable.setOptions({ variables: variables1 }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataOne); - expect(result.networkStatus).toBe(NetworkStatus.ready); - observable.setVariables(differentVariables); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.data).toEqual(dataTwo); - resolve(); - } - }); + expect(result.data).toEqual(data); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync( - "sets networkStatus to `setVariables` when calling refetch with new variables", - (resolve, reject) => { - const mockedResponses = [ - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }, - ]; + { + const result = await stream.takeNext(); - const queryManager = mockQueryManager(...mockedResponses); - const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ - query: firstRequest.query, - variables: firstRequest.variables, - notifyOnNetworkStatusChange: true, - }); + expect(result.loading).toBe(true); + expect(result.networkStatus).toBe(NetworkStatus.setVariables); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); + } - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataOne); - expect(result.networkStatus).toBe(NetworkStatus.ready); - observable.refetch(differentVariables); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.data).toEqual(dataTwo); - resolve(); - } - }); + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data); + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync( - "does not rerun query if variables do not change", - (resolve, reject) => { - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); + { + const result = await observable.reobserve({ variables: variables2 }); - let errored = false; - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - 
expect(result.data).toEqual(dataOne); - observable.setVariables(variables); - - // Nothing should happen, so we'll wait a moment to check that - setTimeout(() => !errored && resolve(), 10); - } else if (handleCount === 2) { - errored = true; - throw new Error("Observable callback should not fire twice"); - } - }); + expect(result.data).toEqual(data2); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - itAsync( - "handles variables changing while a query is in-flight", - (resolve, reject) => { - // The expected behavior is that the original variables are forgotten - // and the query stays in loading state until the result for the new variables - // has returned. - const observable: ObservableQuery = mockWatchQuery( - { - request: { query, variables }, - result: { data: dataOne }, - delay: 20, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - delay: 20, - } - ); + { + const result = await stream.takeNext(); - observable.setVariables(differentVariables); + expect(result.data).toEqual(data2); + expect(result.loading).toBe(true); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); + } - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.loading).toBe(false); - expect(result.data).toEqual(dataTwo); - resolve(); - } else { - reject(new Error("should not deliver more than one result")); - } - }); + { + const result = await stream.takeNext(); + + expect(result.data).toEqual(data2); + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(observable.options.fetchPolicy).toBe("cache-first"); } - ); - }); - describe("refetch", () => { - itAsync( - "calls fetchRequest with fetchPolicy `network-only` when using a non-networked fetch policy", - (resolve, reject) => { - const mockedResponses = [ - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, - }, - ]; + expect(usedFetchPolicies).toEqual([ + "cache-and-network", + "network-only", + "cache-and-network", + "cache-and-network", + ]); - const queryManager = mockQueryManager(...mockedResponses); - const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ - query: firstRequest.query, - variables: firstRequest.variables, - fetchPolicy: "cache-first", - }); + await expect(stream).not.toEmitAnything(); + }); - const mocks = mockFetchQuery(queryManager); + it("cache-and-network refetch should run @client(always: true) resolvers when network request fails", async () => { + const query = gql` + query MixedQuery { + counter @client(always: true) + name + } + `; - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: dataOne, - }); + let count = 0; - observable.refetch(differentVariables); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: dataTwo, - }); + let linkObservable = Observable.of({ + data: { + name: "Ben", + }, + }); - const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; - expect(fqbpCalls.length).toBe(2); - expect(fqbpCalls[0][1].fetchPolicy).toEqual("cache-first"); - expect(fqbpCalls[1][1].fetchPolicy).toEqual("network-only"); - - const fqoCalls = 
mocks.fetchConcastWithInfo.mock.calls; - expect(fqoCalls.length).toBe(2); - expect(fqoCalls[0][1].fetchPolicy).toEqual("cache-first"); - expect(fqoCalls[1][1].fetchPolicy).toEqual("network-only"); - - // Although the options.fetchPolicy we passed just now to - // fetchQueryByPolicy should have been network-only, - // observable.options.fetchPolicy should now be updated to - // cache-first, thanks to options.nextFetchPolicy. - expect(observable.options.fetchPolicy).toBe("cache-first"); - - // Give the test time to fail if more results are delivered. - setTimeout(resolve, 50); - } else { - reject(new Error(`too many results (${count}, ${result})`)); - } - }); - } - ); + const intentionalNetworkFailure = new ApolloError({ + networkError: new Error("intentional network failure"), + }); - it("calling refetch with different variables before the query itself resolved will only yield the result for the new variables", async () => { - const observers: SubscriptionObserver>[] = []; - const queryManager = new QueryManager( - getDefaultOptionsForQueryManagerTests({ - cache: new InMemoryCache(), - link: new ApolloLink((operation, forward) => { - return new Observable((observer) => { - observers.push(observer); - }); - }), - }) + const errorObservable: typeof linkObservable = new Observable( + (observer) => { + observer.error(intentionalNetworkFailure); + } ); - const observableQuery = queryManager.watchQuery({ - query, - variables: { id: 1 }, - }); - const stream = new ObservableStream(observableQuery); - observableQuery.refetch({ id: 2 }); + const client = new ApolloClient({ + link: new ApolloLink(() => linkObservable), + cache: new InMemoryCache(), + resolvers: { + Query: { + counter() { + return ++count; + }, + }, + }, + }); - observers[0].next({ data: dataOne }); - observers[0].complete(); + const observable = client.watchQuery({ + query, + fetchPolicy: "cache-and-network", + returnPartialData: true, + }); - observers[1].next({ data: dataTwo }); - observers[1].complete(); + const stream = new ObservableStream(observable); { const result = await stream.takeNext(); + expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: dataTwo, + data: { + counter: 1, + }, + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, }); } - expect(stream.take()).rejects.toThrow(/Timeout/i); - }); - - it("calling refetch multiple times with different variables will return only results for the most recent variables", async () => { - const observers: SubscriptionObserver>[] = []; - const queryManager = new QueryManager( - getDefaultOptionsForQueryManagerTests({ - cache: new InMemoryCache(), - link: new ApolloLink((operation, forward) => { - return new Observable((observer) => { - observers.push(observer); - }); - }), - }) - ); - const observableQuery = queryManager.watchQuery({ - query, - variables: { id: 1 }, - }); - const stream = new ObservableStream(observableQuery); - - observers[0].next({ data: dataOne }); - observers[0].complete(); { const result = await stream.takeNext(); + expect(result).toEqual({ + data: { + counter: 2, + name: "Ben", + }, loading: false, networkStatus: NetworkStatus.ready, - data: dataOne, }); } - observableQuery.refetch({ id: 2 }); - observableQuery.refetch({ id: 3 }); - - observers[1].next({ data: dataTwo }); - observers[1].complete(); + const oldLinkObs = linkObservable; + // Make the next network request fail. 
+ linkObservable = errorObservable; - observers[2].next({ - data: { - people_one: { - name: "SomeOneElse", - }, - }, - }); - observers[2].complete(); + await expect(() => observable.refetch()).rejects.toThrow( + intentionalNetworkFailure + ); { const result = await stream.takeNext(); + expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, data: { - people_one: { - name: "SomeOneElse", - }, - }, - }); - } - }); - - itAsync( - "calls fetchRequest with fetchPolicy `no-cache` when using `no-cache` fetch policy", - (resolve, reject) => { - const mockedResponses = [ - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables: differentVariables }, - result: { data: dataTwo }, + counter: 3, + name: "Ben", }, - ]; - - const queryManager = mockQueryManager(...mockedResponses); - const firstRequest = mockedResponses[0].request; - const observable = queryManager.watchQuery({ - query: firstRequest.query, - variables: firstRequest.variables, - fetchPolicy: "no-cache", + loading: true, + networkStatus: NetworkStatus.refetch, }); + } - const mocks = mockFetchQuery(queryManager); + { + const error = await stream.takeError(); - subscribeAndCount(reject, observable, (handleCount) => { - if (handleCount === 1) { - observable.refetch(differentVariables); - } else if (handleCount === 2) { - const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; - expect(fqbpCalls.length).toBe(2); - expect(fqbpCalls[1][1].fetchPolicy).toBe("no-cache"); + expect(error).toBe(intentionalNetworkFailure); + } - // Unlike network-only or cache-and-network, the no-cache - // FetchPolicy does not switch to cache-first after the first - // network request. - expect(observable.options.fetchPolicy).toBe("no-cache"); - const fqoCalls = mocks.fetchConcastWithInfo.mock.calls; - expect(fqoCalls.length).toBe(2); - expect(fqoCalls[1][1].fetchPolicy).toBe("no-cache"); + // Switch back from errorObservable. 
+ linkObservable = oldLinkObs; - resolve(); - } + { + const result = await observable.refetch(); + + expect(result).toEqual({ + data: { + counter: 5, + name: "Ben", + }, + loading: false, + networkStatus: NetworkStatus.ready, }); } - ); - itAsync( - "calls ObservableQuery.next even after hitting cache", - (resolve, reject) => { - // This query and variables are copied from react-apollo - const queryWithVars = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name - } + await expect(stream).not.toEmitAnything(); + }); + + describe("warnings about refetch({ variables })", () => { + it("should warn if passed { variables } and query does not declare any variables", async () => { + using _ = spyOnConsole("warn"); + + const queryWithoutVariables = gql` + query QueryWithoutVariables { + getVars { + __typename + name } } `; - const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; - const variables1 = { first: 0 }; - - const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; - const variables2 = { first: 1 }; - - const queryManager = mockQueryManager( - { - request: { - query: queryWithVars, - variables: variables1, + function makeMock(...vars: string[]) { + const requestWithoutVariables = { + query: queryWithoutVariables, + variables: { + variables: vars, }, - result: { data }, - }, - { - request: { - query: queryWithVars, - variables: variables2, - }, - result: { data: data2 }, - }, - { - request: { - query: queryWithVars, - variables: variables1, + }; + + const resultWithVariables = { + data: { + getVars: vars.map((name) => ({ + __typename: "Var", + name, + })), }, - result: { data }, - } + }; + + return { + request: requestWithoutVariables, + result: resultWithVariables, + }; + } + + const observableWithoutVariables = mockWatchQuery( + makeMock("a", "b", "c"), + makeMock("d", "e") ); - const observable = queryManager.watchQuery({ - query: queryWithVars, - variables: variables1, - fetchPolicy: "cache-and-network", - notifyOnNetworkStatusChange: true, - }); + const stream = new ObservableStream(observableWithoutVariables); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - observable.refetch(variables2); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 3) { - expect(result.data).toEqual(data2); - expect(result.loading).toBe(false); - observable.refetch(variables1); - } else if (handleCount === 4) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - } else if (handleCount === 5) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - resolve(); - } + { + const result = await stream.takeNext(); + + expect(result.error).toBeUndefined(); + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + getVars: [ + { __typename: "Var", name: "a" }, + { __typename: "Var", name: "b" }, + { __typename: "Var", name: "c" }, + ], + }); + } + + await observableWithoutVariables.refetch({ + variables: ["d", "e"], }); - } - ); - itAsync( - "resets fetchPolicy when variables change when using nextFetchPolicy", - (resolve, reject) => { - // This query and variables are copied from react-apollo - const queryWithVars = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name - } + { + const result = await 
stream.takeNext(); + + expect(result.error).toBeUndefined(); + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + getVars: [ + { __typename: "Var", name: "d" }, + { __typename: "Var", name: "e" }, + ], + }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + [ + "Called refetch(%o) for query %o, which does not declare a $variables variable.", + "Did you mean to call refetch(variables) instead of refetch({ variables })?", + ].join("\n"), + { variables: ["d", "e"] }, + "QueryWithoutVariables" + ); + } + + await expect(stream).not.toEmitAnything(); + }); + + it("should warn if passed { variables } and query does not declare $variables", async () => { + using _ = spyOnConsole("warn"); + + const queryWithVarsVar = gql` + query QueryWithVarsVar($vars: [String!]) { + getVars(variables: $vars) { + __typename + name } } `; - const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; - const variables1 = { first: 0 }; - - const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; - const variables2 = { first: 1 }; + function makeMock(...vars: string[]) { + const requestWithVarsVar = { + query: queryWithVarsVar, + variables: { vars }, + }; - const queryManager = mockQueryManager( - { - request: { - query: queryWithVars, - variables: variables1, - }, - result: { data }, - }, - { - request: { - query: queryWithVars, - variables: variables2, + const resultWithVarsVar = { + data: { + getVars: vars.map((name) => ({ + __typename: "Var", + name, + })), }, - result: { data: data2 }, - }, - { - request: { - query: queryWithVars, - variables: variables1, - }, - result: { data }, - }, - { - request: { - query: queryWithVars, - variables: variables2, - }, - result: { data: data2 }, - } + }; + + return { + request: requestWithVarsVar, + result: resultWithVarsVar, + }; + } + + // We construct the queryManager manually here rather than using + // `mockWatchQuery` because we need to silence console warnings for + // unmatched variables since. This test checks for calls to + // `console.warn` and unfortunately `mockSingleLink` (used by + // `mockWatchQuery`) does not support the ability to disable warnings + // without introducing a breaking change. Instead we construct this + // manually to be able to turn off warnings for this test. 
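+        // Passing { showWarnings: false } to MockLink below is what silences
+        // the unmatched-variables warning, so the console.warn assertions in
+        // this test only see the refetch({ variables }) warning under test.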
+ const mocks = [makeMock("a", "b", "c"), makeMock("d", "e")]; + const firstRequest = mocks[0].request; + const queryManager = new QueryManager( + getDefaultOptionsForQueryManagerTests({ + cache: new InMemoryCache({ addTypename: false }), + link: new MockLink(mocks, true, { showWarnings: false }), + }) ); - const usedFetchPolicies: WatchQueryFetchPolicy[] = []; - const observable = queryManager.watchQuery({ - query: queryWithVars, - variables: variables1, - fetchPolicy: "cache-and-network", - nextFetchPolicy(currentFetchPolicy, info) { - if (info.reason === "variables-changed") { - return info.initialFetchPolicy; - } - usedFetchPolicies.push(currentFetchPolicy); - if (info.reason === "after-fetch") { - return "cache-first"; - } - return currentFetchPolicy; - }, - notifyOnNetworkStatusChange: true, + const observableWithVarsVar = queryManager.watchQuery({ + query: firstRequest.query, + variables: firstRequest.variables, + notifyOnNetworkStatusChange: false, }); - expect(observable.options.fetchPolicy).toBe("cache-and-network"); - expect(observable.options.initialFetchPolicy).toBe("cache-and-network"); + const stream = new ObservableStream(observableWithVarsVar); + + { + const result = await stream.takeNext(); - subscribeAndCount(reject, observable, (handleCount, result) => { + expect(result.loading).toBe(false); expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ + getVars: [ + { __typename: "Var", name: "a" }, + { __typename: "Var", name: "b" }, + { __typename: "Var", name: "c" }, + ], + }); + } - if (handleCount === 1) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - expect(observable.options.fetchPolicy).toBe("cache-first"); - observable.refetch(variables2); - } else if (handleCount === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (handleCount === 3) { - expect(result.data).toEqual(data2); - expect(result.loading).toBe(false); - expect(observable.options.fetchPolicy).toBe("cache-first"); - observable - .setOptions({ - variables: variables1, - }) - .then((result) => { - expect(result.data).toEqual(data); - }) - .catch(reject); - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (handleCount === 4) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (handleCount === 5) { - expect(result.data).toEqual(data); - expect(result.loading).toBe(false); - expect(observable.options.fetchPolicy).toBe("cache-first"); - observable - .reobserve({ - variables: variables2, - }) - .then((result) => { - expect(result.data).toEqual(data2); - }) - .catch(reject); - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (handleCount === 6) { - expect(result.data).toEqual(data2); - expect(result.loading).toBe(true); - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (handleCount === 7) { - expect(result.data).toEqual(data2); - expect(result.loading).toBe(false); - expect(observable.options.fetchPolicy).toBe("cache-first"); - - expect(usedFetchPolicies).toEqual([ - "cache-and-network", - "network-only", - "cache-and-network", - "cache-and-network", - ]); - - setTimeout(resolve, 10); - } else { - reject(`too many renders (${handleCount})`); - } + // It's a common mistake to call refetch({ variables }) when you meant + // to call refetch(variables). 
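+        // For this query the intended call passes the declared variable
+        // directly, e.g. observableWithVarsVar.refetch({ vars: ["d", "e"] });
+        // wrapping it in { variables } as below matches no mocked response and
+        // only triggers the warning.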
+ const promise = observableWithVarsVar.refetch({ + // @ts-expect-error + variables: { vars: ["d", "e"] }, }); - } - ); - itAsync( - "cache-and-network refetch should run @client(always: true) resolvers when network request fails", - (resolve, reject) => { - const query = gql` - query MixedQuery { - counter @client(always: true) - name - } - `; + { + const error = await stream.takeError(); - let count = 0; + expect(error.message).toMatch( + "No more mocked responses for the query: query QueryWithVarsVar($vars: [String!])" + ); + } - let linkObservable = Observable.of({ - data: { - name: "Ben", - }, - }); + await expect(promise).rejects.toEqual( + expect.objectContaining({ + message: expect.stringMatching( + /No more mocked responses for the query: query QueryWithVarsVar\(\$vars: \[String!\]\)/ + ), + }) + ); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + [ + "Called refetch(%o) for query %o, which does not declare a $variables variable.", + "Did you mean to call refetch(variables) instead of refetch({ variables })?", + ].join("\n"), + { variables: { vars: ["d", "e"] } }, + "QueryWithVarsVar" + ); - const intentionalNetworkFailure = new ApolloError({ - networkError: new Error("intentional network failure"), - }); + await expect(stream).not.toEmitAnything(); + }); + it("should not warn if passed { variables } and query declares $variables", async () => { + using _ = spyOnConsole("warn"); - const errorObservable: typeof linkObservable = new Observable( - (observer) => { - observer.error(intentionalNetworkFailure); + const queryWithVariablesVar = gql` + query QueryWithVariablesVar($variables: [String!]) { + getVars(variables: $variables) { + __typename + name + } } - ); + `; - const client = new ApolloClient({ - link: new ApolloLink((request) => linkObservable), - cache: new InMemoryCache(), - resolvers: { - Query: { - counter() { - return ++count; - }, + function makeMock(...variables: string[]) { + const requestWithVariablesVar = { + query: queryWithVariablesVar, + variables: { + variables, }, - }, - }); - - const observable = client.watchQuery({ - query, - fetchPolicy: "cache-and-network", - returnPartialData: true, - }); + }; - let handleCount = 0; - observable.subscribe({ - error(error) { - expect(error).toBe(intentionalNetworkFailure); - }, + const resultWithVariablesVar = { + data: { + getVars: variables.map((name) => ({ + __typename: "Var", + name, + })), + }, + }; - next(result) { - ++handleCount; + return { + request: requestWithVariablesVar, + result: resultWithVariablesVar, + }; + } - if (handleCount === 1) { - expect(result).toEqual({ - data: { - counter: 1, - }, - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - } else if (handleCount === 2) { - expect(result).toEqual({ - data: { - counter: 2, - name: "Ben", - }, - loading: false, - networkStatus: NetworkStatus.ready, - }); + const observableWithVariablesVar = mockWatchQuery( + makeMock("a", "b", "c"), + makeMock("d", "e") + ); - const oldLinkObs = linkObservable; - // Make the next network request fail. - linkObservable = errorObservable; + const stream = new ObservableStream(observableWithVariablesVar); - observable.refetch().then( - () => { - reject(new Error("should have gotten an error")); - }, + { + const result = await stream.takeNext(); - (error) => { - expect(error).toBe(intentionalNetworkFailure); - - // Switch back from errorObservable. 
- linkObservable = oldLinkObs; - - observable.refetch().then((result) => { - expect(result).toEqual({ - data: { - counter: 5, - name: "Ben", - }, - loading: false, - networkStatus: NetworkStatus.ready, - }); - setTimeout(resolve, 50); - }, reject); - } - ); - } else if (handleCount === 3) { - expect(result).toEqual({ - data: { - counter: 3, - name: "Ben", - }, - loading: true, - networkStatus: NetworkStatus.refetch, - }); - } else if (handleCount > 3) { - reject(new Error("should not get here")); - } - }, - }); - } - ); + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ + getVars: [ + { __typename: "Var", name: "a" }, + { __typename: "Var", name: "b" }, + { __typename: "Var", name: "c" }, + ], + }); + } - describe("warnings about refetch({ variables })", () => { - itAsync( - "should warn if passed { variables } and query does not declare any variables", - (resolve, reject) => { - const consoleWarnSpy = jest.spyOn(console, "warn"); - consoleWarnSpy.mockImplementation(() => {}); - - const queryWithoutVariables = gql` - query QueryWithoutVariables { - getVars { - __typename - name - } - } - `; + await observableWithVariablesVar.refetch({ variables: ["d", "e"] }); - function makeMock(...vars: string[]) { - const requestWithoutVariables = { - query: queryWithoutVariables, - variables: { - variables: vars, - }, - }; - - const resultWithVariables = { - data: { - getVars: vars.map((name) => ({ - __typename: "Var", - name, - })), - }, - }; + { + const result = await stream.takeNext(); - return { - request: requestWithoutVariables, - result: resultWithVariables, - }; - } + expect(result.loading).toBe(false); + expect(result.error).toBeUndefined(); + expect(result.data).toEqual({ + getVars: [ + { __typename: "Var", name: "d" }, + { __typename: "Var", name: "e" }, + ], + }); - const observableWithoutVariables: ObservableQuery = - mockWatchQuery(makeMock("a", "b", "c"), makeMock("d", "e")); - - subscribeAndCount( - reject, - observableWithoutVariables, - (count, result) => { - expect(result.error).toBeUndefined(); - - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ - getVars: [ - { __typename: "Var", name: "a" }, - { __typename: "Var", name: "b" }, - { __typename: "Var", name: "c" }, - ], - }); - - // It's a common mistake to call refetch({ variables }) when you meant - // to call refetch(variables). 
- observableWithoutVariables - .refetch({ - variables: ["d", "e"], - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ - getVars: [ - { __typename: "Var", name: "d" }, - { __typename: "Var", name: "e" }, - ], - }); - - expect(consoleWarnSpy).toHaveBeenCalledTimes(1); - expect(consoleWarnSpy).toHaveBeenCalledWith( - [ - "Called refetch(%o) for query %o, which does not declare a $variables variable.", - "Did you mean to call refetch(variables) instead of refetch({ variables })?", - ].join("\n"), - { variables: ["d", "e"] }, - "QueryWithoutVariables" - ); - consoleWarnSpy.mockRestore(); - - setTimeout(resolve, 10); - } else { - reject(`too many results (${count})`); - } - } - ); + expect(console.warn).not.toHaveBeenCalled(); } - ); - itAsync( - "should warn if passed { variables } and query does not declare $variables", - (resolve, reject) => { - const consoleWarnSpy = jest.spyOn(console, "warn"); - consoleWarnSpy.mockImplementation(() => {}); + await expect(stream).not.toEmitAnything(); + }); + }); + }); - const queryWithVarsVar = gql` - query QueryWithVarsVar($vars: [String!]) { - getVars(variables: $vars) { - __typename - name - } - } - `; + describe("currentResult", () => { + it("returns the same value as observableQuery.next got", async () => { + const queryWithFragment = gql` + fragment CatInfo on Cat { + isTabby + __typename + } - function makeMock(...vars: string[]) { - const requestWithVarsVar = { - query: queryWithVarsVar, - variables: { vars }, - }; - - const resultWithVarsVar = { - data: { - getVars: vars.map((name) => ({ - __typename: "Var", - name, - })), - }, - }; + fragment DogInfo on Dog { + hasBrindleCoat + __typename + } - return { - request: requestWithVarsVar, - result: resultWithVarsVar, - }; + fragment PetInfo on Pet { + id + name + age + ... on Cat { + ...CatInfo + __typename + } + ... on Dog { + ...DogInfo + __typename } + __typename + } - // We construct the queryManager manually here rather than using - // `mockWatchQuery` because we need to silence console warnings for - // unmatched variables since. This test checks for calls to - // `console.warn` and unfortunately `mockSingleLink` (used by - // `mockWatchQuery`) does not support the ability to disable warnings - // without introducing a breaking change. Instead we construct this - // manually to be able to turn off warnings for this test. 
- const mocks = [makeMock("a", "b", "c"), makeMock("d", "e")]; - const firstRequest = mocks[0].request; - const queryManager = new QueryManager( - getDefaultOptionsForQueryManagerTests({ - cache: new InMemoryCache({ addTypename: false }), - link: new MockLink(mocks, true, { showWarnings: false }), - }) - ); + { + pets { + ...PetInfo + __typename + } + } + `; - const observableWithVarsVar = queryManager.watchQuery({ - query: firstRequest.query, - variables: firstRequest.variables, - notifyOnNetworkStatusChange: false, - }); + const petData = [ + { + id: 1, + name: "Phoenix", + age: 6, + isTabby: true, + __typename: "Cat", + }, + { + id: 2, + name: "Tempe", + age: 3, + isTabby: false, + __typename: "Cat", + }, + { + id: 3, + name: "Robin", + age: 10, + hasBrindleCoat: true, + __typename: "Dog", + }, + ]; - subscribeAndCount( - (error) => { - expect(error.message).toMatch( - "No more mocked responses for the query: query QueryWithVarsVar($vars: [String!])" - ); - }, - observableWithVarsVar, - (count, result) => { - expect(result.error).toBeUndefined(); - - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ - getVars: [ - { __typename: "Var", name: "a" }, - { __typename: "Var", name: "b" }, - { __typename: "Var", name: "c" }, - ], - }); - - // It's a common mistake to call refetch({ variables }) when you meant - // to call refetch(variables). - observableWithVarsVar - .refetch({ - variables: { vars: ["d", "e"] }, - } as any) - .then( - (result) => { - reject( - `unexpected result ${JSON.stringify( - result - )}; should have thrown` - ); - }, - (error) => { - expect(error.message).toMatch( - "No more mocked responses for the query: query QueryWithVarsVar($vars: [String!])" - ); - expect(consoleWarnSpy).toHaveBeenCalledTimes(1); - expect(consoleWarnSpy).toHaveBeenCalledWith( - [ - "Called refetch(%o) for query %o, which does not declare a $variables variable.", - "Did you mean to call refetch(variables) instead of refetch({ variables })?", - ].join("\n"), - { variables: { vars: ["d", "e"] } }, - "QueryWithVarsVar" - ); - consoleWarnSpy.mockRestore(); - - setTimeout(resolve, 10); - } - ); - } else { - reject( - `one too many (${count}) results: ${JSON.stringify(result)}` - ); - } - } - ); + const dataOneWithTypename = { + pets: petData.slice(0, 2), + }; + + const dataTwoWithTypename = { + pets: petData.slice(0, 3), + }; + + const ni = mockSingleLink( + { + request: { query: queryWithFragment, variables }, + result: { data: dataOneWithTypename }, + }, + { + request: { query: queryWithFragment, variables }, + result: { data: dataTwoWithTypename }, } ); - itAsync( - "should not warn if passed { variables } and query declares $variables", - (resolve, reject) => { - const consoleWarnSpy = jest.spyOn(console, "warn"); - consoleWarnSpy.mockImplementation(() => {}); + const client = new ApolloClient({ + link: ni, + cache: new InMemoryCache({ + possibleTypes: { + Creature: ["Pet"], + Pet: ["Dog", "Cat"], + }, + }), + }); - const queryWithVariablesVar = gql` - query QueryWithVariablesVar($variables: [String!]) { - getVars(variables: $variables) { - __typename - name - } - } - `; + const observable = client.watchQuery({ + query: queryWithFragment, + variables, + notifyOnNetworkStatusChange: true, + }); - function makeMock(...variables: string[]) { - const requestWithVariablesVar = { - query: queryWithVariablesVar, - variables: { - variables, - }, - }; - - const resultWithVariablesVar = { - data: { - getVars: variables.map((name) => ({ - __typename: "Var", - name, - })), 
- }, - }; + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(false); + expect(result.networkStatus).toEqual(NetworkStatus.ready); + expect(result.data).toEqual(dataOneWithTypename); + expect(observable.getCurrentResult()).toEqual(result); + } + + void observable.refetch(); + + { + const result = await stream.takeNext(); + + expect(result.loading).toBe(true); + expect(result.networkStatus).toEqual(NetworkStatus.refetch); + expect(observable.getCurrentResult()).toEqual(result); + } + + { + const result = await stream.takeNext(); - return { - request: requestWithVariablesVar, - result: resultWithVariablesVar, - }; - } + expect(result.loading).toBe(false); + expect(result.networkStatus).toEqual(NetworkStatus.ready); + expect(result.data).toEqual(dataTwoWithTypename); + expect(observable.getCurrentResult()).toEqual(result); + } - const observableWithVariablesVar: ObservableQuery = - mockWatchQuery(makeMock("a", "b", "c"), makeMock("d", "e")); - - subscribeAndCount( - reject, - observableWithVariablesVar, - (count, result) => { - expect(result.error).toBeUndefined(); - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ - getVars: [ - { __typename: "Var", name: "a" }, - { __typename: "Var", name: "b" }, - { __typename: "Var", name: "c" }, - ], - }); - - observableWithVariablesVar - .refetch({ - variables: ["d", "e"], - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ - getVars: [ - { __typename: "Var", name: "d" }, - { __typename: "Var", name: "e" }, - ], - }); - - expect(consoleWarnSpy).not.toHaveBeenCalled(); - consoleWarnSpy.mockRestore(); - - setTimeout(resolve, 10); - } else { - reject(`too many results (${count})`); - } - } - ); - } - ); + await expect(stream).not.toEmitAnything(); }); - }); - describe("currentResult", () => { - itAsync( - "returns the same value as observableQuery.next got", - (resolve, reject) => { - const queryWithFragment = gql` - fragment CatInfo on Cat { - isTabby - __typename - } + it("returns the current query status immediately", async () => { + const observable = mockWatchQuery({ + request: { query, variables }, + result: { data: dataOne }, + delay: 100, + }); - fragment DogInfo on Dog { - hasBrindleCoat - __typename - } + const stream = new ObservableStream(observable); - fragment PetInfo on Pet { - id - name - age - ... on Cat { - ...CatInfo - __typename - } - ... 
on Dog { - ...DogInfo - __typename - } - __typename - } + expect(observable.getCurrentResult()).toEqual({ + loading: true, + data: undefined, + networkStatus: 1, + partial: true, + }); - { - pets { - ...PetInfo - __typename - } - } - `; + await tick(); - const petData = [ - { - id: 1, - name: "Phoenix", - age: 6, - isTabby: true, - __typename: "Cat", - }, - { - id: 2, - name: "Tempe", - age: 3, - isTabby: false, - __typename: "Cat", - }, - { - id: 3, - name: "Robin", - age: 10, - hasBrindleCoat: true, - __typename: "Dog", - }, - ]; + expect(observable.getCurrentResult()).toEqual({ + loading: true, + data: undefined, + networkStatus: 1, + partial: true, + }); - const dataOneWithTypename = { - pets: petData.slice(0, 2), - }; + await stream.takeNext(); - const dataTwoWithTypename = { - pets: petData.slice(0, 3), - }; + expect(observable.getCurrentResult()).toEqual({ + data: dataOne, + loading: false, + networkStatus: 7, + }); + }); - const ni = mockSingleLink( - { - request: { query: queryWithFragment, variables }, - result: { data: dataOneWithTypename }, - }, - { - request: { query: queryWithFragment, variables }, - result: { data: dataTwoWithTypename }, - } - ).setOnError(reject); + it("returns results from the store immediately", async () => { + const queryManager = mockQueryManager({ + request: { query, variables }, + result: { data: dataOne }, + }); - const client = new ApolloClient({ - link: ni, - cache: new InMemoryCache({ - possibleTypes: { - Creature: ["Pet"], - Pet: ["Dog", "Cat"], - }, - }), - }); + const result = await queryManager.query({ query, variables }); - const observable = client.watchQuery({ - query: queryWithFragment, - variables, - notifyOnNetworkStatusChange: true, - }); + expect(result).toEqual({ + data: dataOne, + loading: false, + networkStatus: 7, + }); - subscribeAndCount(reject, observable, (count, result) => { - const { data, loading, networkStatus } = - observable.getCurrentResult(); - expect(result.loading).toEqual(loading); - expect(result.networkStatus).toEqual(networkStatus); - expect(result.data).toEqual(data); - - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toEqual(NetworkStatus.ready); - expect(result.data).toEqual(dataOneWithTypename); - observable.refetch(); - } else if (count === 2) { - expect(result.loading).toBe(true); - expect(result.networkStatus).toEqual(NetworkStatus.refetch); - } else if (count === 3) { - expect(result.loading).toBe(false); - expect(result.networkStatus).toEqual(NetworkStatus.ready); - expect(result.data).toEqual(dataTwoWithTypename); - setTimeout(resolve, 5); - } else { - reject(new Error("Observable.next called too many times")); - } - }); - } - ); + const observable = queryManager.watchQuery({ query, variables }); - itAsync( - "returns the current query status immediately", - (resolve, reject) => { - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - delay: 100, - }); + expect(observable.getCurrentResult()).toEqual({ + data: dataOne, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); - subscribeAndCount(reject, observable, () => { - expect(observable.getCurrentResult()).toEqual({ - data: dataOne, - loading: false, - networkStatus: 7, - }); - resolve(); - }); + it("returns errors from the store immediately", async () => { + const queryManager = mockQueryManager({ + request: { query, variables }, + result: { errors: [error] }, + }); - expect(observable.getCurrentResult()).toEqual({ - loading: true, - 
data: undefined, - networkStatus: 1, - partial: true, - }); + const observable = queryManager.watchQuery({ query, variables }); + const stream = new ObservableStream(observable); - setTimeout( - wrap(reject, () => { - expect(observable.getCurrentResult()).toEqual({ - loading: true, - data: undefined, - networkStatus: 1, - partial: true, - }); - }), - 0 - ); - } - ); + const theError = await stream.takeError(); + const currentResult = observable.getCurrentResult(); - itAsync("returns results from the store immediately", (resolve, reject) => { + expect(theError.graphQLErrors).toEqual([error]); + expect(currentResult.loading).toBe(false); + expect(currentResult.error!.graphQLErrors).toEqual([error]); + }); + + it("returns referentially equal errors", async () => { const queryManager = mockQueryManager({ request: { query, variables }, - result: { data: dataOne }, + result: { errors: [error] }, }); - return queryManager - .query({ query, variables }) - .then((result: any) => { - expect(result).toEqual({ - data: dataOne, - loading: false, - networkStatus: 7, - }); - const observable = queryManager.watchQuery({ - query, - variables, - }); - expect(observable.getCurrentResult()).toEqual({ - data: dataOne, - loading: false, - networkStatus: NetworkStatus.ready, - }); - }) - .then(resolve, reject); + const observable = queryManager.watchQuery({ query, variables }); + + await expect(observable.result()).rejects.toThrow( + new ApolloError({ graphQLErrors: [error] }) + ); + + const currentResult = observable.getCurrentResult(); + const currentResult2 = observable.getCurrentResult(); + + expect(currentResult.loading).toBe(false); + expect(currentResult.error!.graphQLErrors).toEqual([error]); + expect(currentResult.error === currentResult2.error).toBe(true); }); - itAsync("returns errors from the store immediately", (resolve) => { + it("returns errors with data if errorPolicy is all", async () => { const queryManager = mockQueryManager({ request: { query, variables }, - result: { errors: [error] }, + result: { data: dataOne, errors: [error] }, }); const observable = queryManager.watchQuery({ query, variables, + errorPolicy: "all", }); - observable.subscribe({ - error: (theError) => { - expect(theError.graphQLErrors).toEqual([error]); + const result = await observable.result(); + const currentResult = observable.getCurrentResult(); - const currentResult = observable.getCurrentResult(); - expect(currentResult.loading).toBe(false); - expect(currentResult.error!.graphQLErrors).toEqual([error]); - resolve(); - }, - }); + expect(result.data).toEqual(dataOne); + expect(result.errors).toEqual([error]); + expect(currentResult.loading).toBe(false); + expect(currentResult.errors).toEqual([error]); + expect(currentResult.error).toBeUndefined(); }); - itAsync("returns referentially equal errors", (resolve, reject) => { + it("errors out if errorPolicy is none", async () => { const queryManager = mockQueryManager({ request: { query, variables }, - result: { errors: [error] }, + result: { data: dataOne, errors: [error] }, }); const observable = queryManager.watchQuery({ query, variables, + errorPolicy: "none", }); - return observable - .result() - .catch((theError: any) => { - expect(theError.graphQLErrors).toEqual([error]); + await expect(observable.result()).rejects.toEqual(wrappedError); - const currentResult = observable.getCurrentResult(); - expect(currentResult.loading).toBe(false); - expect(currentResult.error!.graphQLErrors).toEqual([error]); - const currentResult2 = observable.getCurrentResult(); - 
expect(currentResult.error === currentResult2.error).toBe(true); - }) - .then(resolve, reject); + expect(observable.getLastError()).toEqual(wrappedError); }); - itAsync( - "returns errors with data if errorPolicy is all", - (resolve, reject) => { - const queryManager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne, errors: [error] }, - }); + it("errors out if errorPolicy is none and the observable has completed", async () => { + const queryManager = mockQueryManager({ + request: { query, variables }, + result: { data: dataOne, errors: [error] }, + }); - const observable = queryManager.watchQuery({ - query, - variables, - errorPolicy: "all", - }); + const observable = queryManager.watchQuery({ + query, + variables, + errorPolicy: "none", + }); - return observable - .result() - .then((result) => { - expect(result.data).toEqual(dataOne); - expect(result.errors).toEqual([error]); - const currentResult = observable.getCurrentResult(); - expect(currentResult.loading).toBe(false); - expect(currentResult.errors).toEqual([error]); - expect(currentResult.error).toBeUndefined(); - }) - .then(resolve, reject); - } - ); + await expect(observable.result()).rejects.toEqual(wrappedError); + await expect(observable.result()).rejects.toEqual(wrappedError); + + expect(observable.getLastError()).toEqual(wrappedError); + }); - itAsync("errors out if errorPolicy is none", (resolve, reject) => { + it("ignores errors with data if errorPolicy is ignore", async () => { const queryManager = mockQueryManager({ request: { query, variables }, - result: { data: dataOne, errors: [error] }, + result: { errors: [error], data: dataOne }, }); const observable = queryManager.watchQuery({ query, variables, - errorPolicy: "none", + errorPolicy: "ignore", }); - return observable - .result() - .then(() => reject("Observable did not error when it should have")) - .catch((currentError) => { - expect(currentError).toEqual(wrappedError); - const lastError = observable.getLastError(); - expect(lastError).toEqual(wrappedError); - resolve(); - }) - .catch(reject); + const result = await observable.result(); + const currentResult = observable.getCurrentResult(); + + expect(result.data).toEqual(dataOne); + expect(result.errors).toBeUndefined(); + expect(currentResult.loading).toBe(false); + expect(currentResult.errors).toBeUndefined(); + expect(currentResult.error).toBeUndefined(); }); - itAsync( - "errors out if errorPolicy is none and the observable has completed", - (resolve, reject) => { - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne, errors: [error] }, - }, - // FIXME: We shouldn't need a second mock, there should only be one network request - { - request: { query, variables }, - result: { data: dataOne, errors: [error] }, + it("returns partial data from the store immediately", async () => { + const superQuery = gql` + query superQuery($id: ID!) 
{ + people_one(id: $id) { + name + age } - ); + } + `; - const observable = queryManager.watchQuery({ - query, - variables, - errorPolicy: "none", + const superDataOne = { + people_one: { + name: "Luke Skywalker", + age: 21, + }, + }; + + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query: superQuery, variables }, + result: { data: superDataOne }, + } + ); + + await queryManager.query({ query, variables }); + + const observable = queryManager.watchQuery({ + query: superQuery, + variables, + returnPartialData: true, + }); + + expect(observable.getCurrentResult()).toEqual({ + data: dataOne, + loading: true, + networkStatus: 1, + partial: true, + }); + + const stream = new ObservableStream(observable); + + { + const result = await stream.takeNext(); + const current = observable.getCurrentResult(); + + expect(result).toEqual({ + data: dataOne, + loading: true, + networkStatus: 1, + partial: true, }); + expect(current.data).toEqual(dataOne); + expect(current.loading).toEqual(true); + expect(current.networkStatus).toEqual(1); + } - return ( - observable - .result() - .then(() => reject("Observable did not error when it should have")) - // We wait for the observable to error out and reobtain a promise - .catch(() => observable.result()) - .then((result) => - reject( - "Observable did not error the second time we fetched results when it should have" - ) - ) - .catch((currentError) => { - expect(currentError).toEqual(wrappedError); - const lastError = observable.getLastError(); - expect(lastError).toEqual(wrappedError); - resolve(); - }) - .catch(reject) - ); + { + const result = await stream.takeNext(); + const current = observable.getCurrentResult(); + + expect(result).toEqual({ + data: superDataOne, + loading: false, + networkStatus: 7, + }); + expect(current.data).toEqual(superDataOne); + expect(current.loading).toEqual(false); + expect(current.networkStatus).toEqual(7); } - ); - itAsync( - "ignores errors with data if errorPolicy is ignore", - (resolve, reject) => { - const queryManager = mockQueryManager({ + await expect(stream).not.toEmitAnything(); + }); + + it("returns loading even if full data is available when using network-only fetchPolicy", async () => { + const queryManager = mockQueryManager( + { request: { query, variables }, - result: { errors: [error], data: dataOne }, - }); + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); - const observable = queryManager.watchQuery({ - query, - variables, - errorPolicy: "ignore", - }); + const result = await queryManager.query({ query, variables }); - return observable - .result() - .then((result) => { - expect(result.data).toEqual(dataOne); - expect(result.errors).toBeUndefined(); - const currentResult = observable.getCurrentResult(); - expect(currentResult.loading).toBe(false); - expect(currentResult.errors).toBeUndefined(); - expect(currentResult.error).toBeUndefined(); - }) - .then(resolve, reject); - } - ); + expect(result).toEqual({ + data: dataOne, + loading: false, + networkStatus: NetworkStatus.ready, + }); - itAsync( - "returns partial data from the store immediately", - (resolve, reject) => { - const superQuery = gql` - query superQuery($id: ID!) 
{ - people_one(id: $id) { - name - age - } - } - `; + const observable = queryManager.watchQuery({ + query, + variables, + fetchPolicy: "network-only", + }); - const superDataOne = { - people_one: { - name: "Luke Skywalker", - age: 21, - }, - }; + expect(observable.getCurrentResult()).toEqual({ + data: undefined, + loading: true, + networkStatus: NetworkStatus.loading, + }); - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query: superQuery, variables }, - result: { data: superDataOne }, - } - ); + const stream = new ObservableStream(observable); - queryManager.query({ query, variables }).then((result) => { - const observable = queryManager.watchQuery({ - query: superQuery, - variables, - returnPartialData: true, - }); + { + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: true, + data: undefined, + networkStatus: NetworkStatus.loading, + }); + } - expect(observable.getCurrentResult()).toEqual({ - data: dataOne, - loading: true, - networkStatus: 1, - partial: true, - }); + { + const result = await stream.takeNext(); - // we can use this to trigger the query - subscribeAndCount(reject, observable, (handleCount, subResult) => { - const { data, loading, networkStatus } = - observable.getCurrentResult(); - - expect(subResult.data).toEqual(data); - expect(subResult.loading).toEqual(loading); - expect(subResult.networkStatus).toEqual(networkStatus); - - if (handleCount === 1) { - expect(subResult).toEqual({ - data: dataOne, - loading: true, - networkStatus: 1, - partial: true, - }); - } else if (handleCount === 2) { - expect(subResult).toEqual({ - data: superDataOne, - loading: false, - networkStatus: 7, - }); - resolve(); - } - }); + expect(result).toEqual({ + data: dataTwo, + loading: false, + networkStatus: NetworkStatus.ready, }); } - ); - itAsync( - "returns loading even if full data is available when using network-only fetchPolicy", - (resolve, reject) => { - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); + await expect(stream).not.toEmitAnything(); + }); - queryManager.query({ query, variables }).then((result) => { - expect(result).toEqual({ - data: dataOne, - loading: false, - networkStatus: NetworkStatus.ready, - }); + it("returns loading on no-cache fetchPolicy queries when calling getCurrentResult", async () => { + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); - const observable = queryManager.watchQuery({ - query, - variables, - fetchPolicy: "network-only", - }); + await queryManager.query({ query, variables }); - expect(observable.getCurrentResult()).toEqual({ - data: undefined, - loading: true, - networkStatus: NetworkStatus.loading, - }); + const observable = queryManager.watchQuery({ + query, + variables, + fetchPolicy: "no-cache", + }); - subscribeAndCount(reject, observable, (handleCount, subResult) => { - if (handleCount === 1) { - expect(subResult).toEqual({ - loading: true, - data: undefined, - networkStatus: NetworkStatus.loading, - }); - } else if (handleCount === 2) { - expect(subResult).toEqual({ - data: dataTwo, - loading: false, - networkStatus: NetworkStatus.ready, - }); - resolve(); - } - }); - }); - } - ); + expect(observable.getCurrentResult()).toEqual({ + data: undefined, + loading: 
true, + networkStatus: 1, + }); - itAsync( - "returns loading on no-cache fetchPolicy queries when calling getCurrentResult", - (resolve, reject) => { - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query, variables }, - result: { data: dataTwo }, - } - ); + const stream = new ObservableStream(observable); - queryManager.query({ query, variables }).then(() => { - const observable = queryManager.watchQuery({ - query, - variables, - fetchPolicy: "no-cache", - }); - expect(observable.getCurrentResult()).toEqual({ - data: undefined, - loading: true, - networkStatus: 1, - }); + { + const result = await stream.takeNext(); + const current = observable.getCurrentResult(); - subscribeAndCount(reject, observable, (handleCount, subResult) => { - const { data, loading, networkStatus } = - observable.getCurrentResult(); + expect(result).toEqual({ + data: undefined, + loading: true, + networkStatus: NetworkStatus.loading, + }); + expect(current.data).toBeUndefined(); + expect(current.loading).toBe(true); + expect(current.networkStatus).toBe(NetworkStatus.loading); + } - if (handleCount === 1) { - expect(subResult).toEqual({ - data, - loading, - networkStatus, - }); - } else if (handleCount === 2) { - expect(subResult).toEqual({ - data: dataTwo, - loading: false, - networkStatus: 7, - }); - resolve(); - } - }); + { + const result = await stream.takeNext(); + const current = observable.getCurrentResult(); + + expect(result).toEqual({ + data: dataTwo, + loading: false, + networkStatus: NetworkStatus.ready, }); + expect(current.data).toEqual(dataTwo); + expect(current.loading).toBe(false); + expect(current.networkStatus).toBe(NetworkStatus.ready); } - ); + }); it("handles multiple calls to getCurrentResult without losing data", async () => { const query = gql` @@ -2791,7 +2823,7 @@ describe("ObservableQuery", () => { resultAfterCacheUpdate2 ); - observableQuery.refetch(); + void observableQuery.refetch(); cache.writeQuery({ query, data: cacheValues.update3 }); expect(observableQuery.getCurrentResult()).toStrictEqual( @@ -2841,157 +2873,155 @@ describe("ObservableQuery", () => { }, }; - itAsync( - "returns optimistic mutation results from the store", - (resolve, reject) => { - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data: dataOne }, - }, - { - request: { query: mutation }, - result: { data: mutationData }, - } - ); + it("returns optimistic mutation results from the store", async () => { + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query: mutation }, + result: { data: mutationData }, + } + ); - const observable = queryManager.watchQuery({ - query, - variables, - }); + const observable = queryManager.watchQuery({ + query, + variables, + }); - subscribeAndCount(reject, observable, (count, result) => { - const { data, loading, networkStatus } = - observable.getCurrentResult(); - expect(result).toEqual({ - data, - loading, - networkStatus, - }); + const stream = new ObservableStream(observable); - if (count === 1) { - expect(result).toEqual({ - data: dataOne, - loading: false, - networkStatus: 7, - }); - queryManager.mutate({ - mutation, - optimisticResponse, - updateQueries, - }); - } else if (count === 2) { - expect(result.data.people_one).toEqual(optimisticResponse); - } else if (count === 3) { - expect(result.data.people_one).toEqual(mutationData); - resolve(); - } + { + const result = await 
stream.takeNext(); + + expect(result).toEqual({ + data: dataOne, + loading: false, + networkStatus: 7, }); + expect(observable.getCurrentResult()).toEqual(result); } - ); + + void queryManager.mutate({ + mutation, + optimisticResponse, + updateQueries, + }); + + { + const result = await stream.takeNext(); + + expect(observable.getCurrentResult()).toEqual(result); + expect(result.data.people_one).toEqual(optimisticResponse); + } + + { + const result = await stream.takeNext(); + + expect(observable.getCurrentResult()).toEqual(result); + expect(result.data.people_one).toEqual(mutationData); + } + + await expect(stream).not.toEmitAnything(); + }); }); }); describe("assumeImmutableResults", () => { - itAsync( - "should prevent costly (but safe) cloneDeep calls", - async (resolve) => { - const queryOptions = { - query: gql` - query { - value - } - `, - pollInterval: 20, - }; - - function check({ - assumeImmutableResults = true, - assertFrozenResults = false, - }) { - const cache = new InMemoryCache(); - const client = new ApolloClient({ - link: mockSingleLink( - { request: queryOptions, result: { data: { value: 1 } } }, - { request: queryOptions, result: { data: { value: 2 } } }, - { request: queryOptions, result: { data: { value: 3 } } } - ).setOnError((error) => { - throw error; - }), - assumeImmutableResults, - cache, - }); + it("should prevent costly (but safe) cloneDeep calls", async () => { + const queryOptions = { + query: gql` + query { + value + } + `, + pollInterval: 20, + }; + + function check({ + assumeImmutableResults = true, + assertFrozenResults = false, + }) { + const cache = new InMemoryCache(); + const client = new ApolloClient({ + link: mockSingleLink( + { request: queryOptions, result: { data: { value: 1 } } }, + { request: queryOptions, result: { data: { value: 2 } } }, + { request: queryOptions, result: { data: { value: 3 } } } + ).setOnError((error) => { + throw error; + }), + assumeImmutableResults, + cache, + }); - const observable = client.watchQuery(queryOptions); - const values: any[] = []; - - return new Promise((resolve, reject) => { - observable.subscribe({ - next({ data }) { - values.push(data.value); - if (assertFrozenResults) { - try { - data.value = "oyez"; - } catch (error) { - reject(error); - } - } else { - data = { - ...data, - value: "oyez", - }; + const observable = client.watchQuery(queryOptions); + const values: any[] = []; + + return new Promise((resolve, reject) => { + observable.subscribe({ + next({ data }) { + values.push(data.value); + if (assertFrozenResults) { + try { + data.value = "oyez"; + } catch (error) { + reject(error); } - client.writeQuery({ - query: queryOptions.query, - data, - }); - }, - error(err) { - expect(err.message).toMatch(/No more mocked responses/); - resolve(values); - }, - }); + } else { + data = { + ...data, + value: "oyez", + }; + } + client.writeQuery({ + query: queryOptions.query, + data, + }); + }, + error(err) { + expect(err.message).toMatch(/No more mocked responses/); + resolve(values); + }, }); - } + }); + } - async function checkThrows(assumeImmutableResults: boolean) { - try { - await check({ - assumeImmutableResults, - // No matter what value we provide for assumeImmutableResults, if we - // tell the InMemoryCache to deep-freeze its results, destructive - // modifications of the result objects will become fatal. Once you - // start enforcing immutability in this way, you might as well pass - // assumeImmutableResults: true, to prevent calling cloneDeep. 
- assertFrozenResults: true, - }); - throw new Error("not reached"); - } catch (error) { - expect(error).toBeInstanceOf(TypeError); - expect((error as Error).message).toMatch( - /Cannot assign to read only property 'value'/ - ); - } + async function checkThrows(assumeImmutableResults: boolean) { + try { + await check({ + assumeImmutableResults, + // No matter what value we provide for assumeImmutableResults, if we + // tell the InMemoryCache to deep-freeze its results, destructive + // modifications of the result objects will become fatal. Once you + // start enforcing immutability in this way, you might as well pass + // assumeImmutableResults: true, to prevent calling cloneDeep. + assertFrozenResults: true, + }); + throw new Error("not reached"); + } catch (error) { + expect(error).toBeInstanceOf(TypeError); + expect((error as Error).message).toMatch( + /Cannot assign to read only property 'value'/ + ); } - await checkThrows(true); - await checkThrows(false); - - resolve(); } - ); + await checkThrows(true); + await checkThrows(false); + }); }); describe("resetQueryStoreErrors", () => { - itAsync( - "should remove any GraphQLError's stored in the query store", - (resolve) => { - const graphQLError = new GraphQLError("oh no!"); + it("should remove any GraphQLError's stored in the query store", async () => { + const graphQLError = new GraphQLError("oh no!"); - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { errors: [graphQLError] }, - }); + const observable = mockWatchQuery({ + request: { query, variables }, + result: { errors: [graphQLError] }, + }); + await new Promise((resolve) => { observable.subscribe({ error() { const { queryManager } = observable as any; @@ -3004,31 +3034,27 @@ describe("ObservableQuery", () => { resolve(); }, }); - } - ); + }); + }); - itAsync( - "should remove network error's stored in the query store", - (resolve) => { - const networkError = new Error("oh no!"); + it("should remove network error's stored in the query store", async () => { + const networkError = new Error("oh no!"); - const observable: ObservableQuery = mockWatchQuery({ - request: { query, variables }, - result: { data: dataOne }, - }); + const observable = mockWatchQuery({ + request: { query, variables }, + result: { data: dataOne }, + }); - observable.subscribe({ - next() { - const { queryManager } = observable as any; - const queryInfo = queryManager["queries"].get(observable.queryId); - queryInfo.networkError = networkError; - observable.resetQueryStoreErrors(); - expect(queryInfo.networkError).toBeUndefined(); - resolve(); - }, - }); - } - ); + const stream = new ObservableStream(observable); + + await stream.takeNext(); + + const { queryManager } = observable as any; + const queryInfo = queryManager["queries"].get(observable.queryId); + queryInfo.networkError = networkError; + observable.resetQueryStoreErrors(); + expect(queryInfo.networkError).toBeUndefined(); + }); }); describe(".query computed property", () => { @@ -3078,7 +3104,7 @@ describe("ObservableQuery", () => { expect(observable.query).toBe(result); }); - it("is updated with transformed query when `setOptions` changes the query", () => { + it("is updated with transformed query when `setOptions` changes the query", async () => { const query = gql` query { currentUser { @@ -3111,7 +3137,7 @@ describe("ObservableQuery", () => { } `); - observable.setOptions({ query: updatedQuery }); + await observable.setOptions({ query: updatedQuery }); expect(observable.query).toMatchDocument(gql` 
query { @@ -3157,85 +3183,80 @@ describe("ObservableQuery", () => { }); }); - itAsync( - "QueryInfo does not notify for !== but deep-equal results", - (resolve, reject) => { - const queryManager = mockQueryManager({ - request: { query, variables }, - result: { data: dataOne }, - }); + it("QueryInfo does not notify for !== but deep-equal results", async () => { + const queryManager = mockQueryManager({ + request: { query, variables }, + result: { data: dataOne }, + }); - const observable = queryManager.watchQuery({ - query, - variables, - // If we let the cache return canonical results, it will be harder to - // write this test, because any two results that are deeply equal will - // also be !==, making the choice of equality test in queryInfo.setDiff - // less visible/important. - canonizeResults: false, - }); + const observable = queryManager.watchQuery({ + query, + variables, + // If we let the cache return canonical results, it will be harder to + // write this test, because any two results that are deeply equal will + // also be !==, making the choice of equality test in queryInfo.setDiff + // less visible/important. + canonizeResults: false, + }); - const queryInfo = observable["queryInfo"]; - const cache = queryInfo["cache"]; - const setDiffSpy = jest.spyOn(queryInfo, "setDiff"); - const notifySpy = jest.spyOn(queryInfo, "notify"); + const queryInfo = observable["queryInfo"]; + const cache = queryInfo["cache"]; + const setDiffSpy = jest.spyOn(queryInfo, "setDiff"); + const notifySpy = jest.spyOn(queryInfo, "notify"); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: dataOne, - }); + const stream = new ObservableStream(observable); - let invalidateCount = 0; - let onWatchUpdatedCount = 0; - - cache.batch({ - optimistic: true, - update(cache) { - cache.modify({ - fields: { - people_one(value, { INVALIDATE }) { - expect(value).toEqual(dataOne.people_one); - ++invalidateCount; - return INVALIDATE; - }, - }, - }); + const result = await stream.takeNext(); + + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataOne, + }); + + let invalidateCount = 0; + let onWatchUpdatedCount = 0; + + cache.batch({ + optimistic: true, + update(cache) { + cache.modify({ + fields: { + people_one(value, { INVALIDATE }) { + expect(value).toEqual(dataOne.people_one); + ++invalidateCount; + return INVALIDATE; }, - // Verify that the cache.modify operation did trigger a cache broadcast. - onWatchUpdated(watch, diff) { - expect(watch.watcher).toBe(queryInfo); - expect(diff).toEqual({ - complete: true, - result: { - people_one: { - name: "Luke Skywalker", - }, - }, - }); - ++onWatchUpdatedCount; + }, + }); + }, + // Verify that the cache.modify operation did trigger a cache broadcast. 
+ onWatchUpdated(watch, diff) { + expect(watch.watcher).toBe(queryInfo); + expect(diff).toEqual({ + complete: true, + result: { + people_one: { + name: "Luke Skywalker", }, - }); + }, + }); + ++onWatchUpdatedCount; + }, + }); - new Promise((resolve) => setTimeout(resolve, 100)) - .then(() => { - expect(setDiffSpy).toHaveBeenCalledTimes(1); - expect(notifySpy).not.toHaveBeenCalled(); - expect(invalidateCount).toBe(1); - expect(onWatchUpdatedCount).toBe(1); - queryManager.stop(); - }) - .then(resolve, reject); - } else { - reject("too many results"); - } - }); - } - ); + await wait(100); - itAsync("ObservableQuery#map respects Symbol.species", (resolve, reject) => { + expect(setDiffSpy).toHaveBeenCalledTimes(1); + expect(notifySpy).not.toHaveBeenCalled(); + expect(invalidateCount).toBe(1); + expect(onWatchUpdatedCount).toBe(1); + queryManager.stop(); + + await expect(stream).not.toEmitAnything(); + }); + + it("ObservableQuery#map respects Symbol.species", async () => { const observable = mockWatchQuery({ request: { query, variables }, result: { data: dataOne }, @@ -3257,22 +3278,24 @@ describe("ObservableQuery", () => { expect(mapped).toBeInstanceOf(Observable); expect(mapped).not.toBeInstanceOf(ObservableQuery); - const sub = mapped.subscribe({ - next(result) { - sub.unsubscribe(); - try { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { mapped: true }, - }); - } catch (error) { - reject(error); - return; - } - resolve(); - }, - error: reject, + await new Promise((resolve, reject) => { + const sub = mapped.subscribe({ + next(result) { + sub.unsubscribe(); + try { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { mapped: true }, + }); + } catch (error) { + reject(error); + return; + } + resolve(); + }, + error: reject, + }); }); }); }); @@ -3475,9 +3498,9 @@ test("handles changing variables in rapid succession before other request is com }); }); - observable.reobserve({ variables: { department: "HR" } }); + void observable.reobserve({ variables: { department: "HR" } }); await wait(10); - observable.reobserve({ variables: { department: null } }); + void observable.reobserve({ variables: { department: null } }); // Wait for request to finish await wait(50); diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts index def25285543..5d6d9592bcc 100644 --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -46,10 +46,11 @@ import wrap from "../../../testing/core/wrap"; import observableToPromise, { observableToPromiseAndSubscription, } from "../../../testing/core/observableToPromise"; -import { itAsync, subscribeAndCount } from "../../../testing/core"; +import { itAsync, wait } from "../../../testing/core"; import { ApolloClient } from "../../../core"; import { mockFetchQuery } from "../ObservableQuery"; import { Concast, print } from "../../../utilities"; +import { ObservableStream } from "../../../testing/internal"; interface MockedMutation { reject: (reason: any) => any; @@ -1790,88 +1791,86 @@ describe("QueryManager", () => { ]).then(resolve, reject); }); - itAsync( - "updates result of previous query if the result of a new query overlaps", - (resolve, reject) => { - const query1 = gql` - { - people_one(id: 1) { - __typename - id - name - age - } + it("updates result of previous query if the result of a new query overlaps", async () => { + const query1 = gql` + { + people_one(id: 1) { + __typename + id + name + age } - `; + } 
+ `; - const data1 = { - people_one: { - // Correctly identifying this entity is necessary so that fields - // from query1 and query2 can be safely merged in the cache. - __typename: "Human", - id: 1, - name: "Luke Skywalker", - age: 50, - }, - }; + const data1 = { + people_one: { + // Correctly identifying this entity is necessary so that fields + // from query1 and query2 can be safely merged in the cache. + __typename: "Human", + id: 1, + name: "Luke Skywalker", + age: 50, + }, + }; - const query2 = gql` - { - people_one(id: 1) { - __typename - id - name - username - } + const query2 = gql` + { + people_one(id: 1) { + __typename + id + name + username } - `; + } + `; - const data2 = { + const data2 = { + people_one: { + __typename: "Human", + id: 1, + name: "Luke Skywalker has a new name", + username: "luke", + }, + }; + + const queryManager = mockQueryManager( + { + request: { query: query1 }, + result: { data: data1 }, + }, + { + request: { query: query2 }, + result: { data: data2 }, + delay: 10, + } + ); + + const observable = queryManager.watchQuery({ query: query1 }); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitMatchedValue({ data: data1 }); + + await queryManager.query({ query: query2 }); + + await expect(stream).toEmitMatchedValue({ + data: { people_one: { __typename: "Human", id: 1, name: "Luke Skywalker has a new name", - username: "luke", - }, - }; - - const queryManager = mockQueryManager( - { - request: { query: query1 }, - result: { data: data1 }, + age: 50, }, - { - request: { query: query2 }, - result: { data: data2 }, - delay: 10, - } - ); - - const observable = queryManager.watchQuery({ query: query1 }); + }, + }); - subscribeAndCount(reject, observable, (handleCount, result) => { - if (handleCount === 1) { - expect(result.data).toEqual(data1); - queryManager.query({ query: query2 }); - } else if (handleCount === 2) { - expect(result.data).toEqual({ - people_one: { - __typename: "Human", - id: 1, - name: "Luke Skywalker has a new name", - age: 50, - }, - }); - resolve(); - } - }); - } - ); + await expect(stream).not.toEmitAnything(); + }); itAsync("warns if you forget the template literal tag", async (resolve) => { const queryManager = mockQueryManager(); expect(() => { - queryManager.query({ + void queryManager.query({ // Bamboozle TypeScript into letting us do this query: "string" as any as DocumentNode, }); @@ -2438,262 +2437,224 @@ describe("QueryManager", () => { } ); - itAsync( - "should not write unchanged network results to cache", - (resolve, reject) => { - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - info: { - merge: false, - }, + it("should not write unchanged network results to cache", async () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + info: { + merge: false, }, }, }, - }); + }, + }); - const client = new ApolloClient({ - cache, - link: new ApolloLink( - (operation) => - new Observable((observer: Observer) => { - switch (operation.operationName) { - case "A": - observer.next!({ data: { info: { a: "ay" } } }); - break; - case "B": - observer.next!({ data: { info: { b: "bee" } } }); - break; - } - observer.complete!(); - }) - ), - }); + const client = new ApolloClient({ + cache, + link: new ApolloLink( + (operation) => + new Observable((observer: Observer) => { + switch (operation.operationName) { + case "A": + observer.next!({ data: { info: { a: "ay" } } }); + break; + case "B": + observer.next!({ data: { info: { b: "bee" } } }); + break; + } + 
observer.complete!(); + }) + ), + }); - const queryA = gql` - query A { - info { - a - } + const queryA = gql` + query A { + info { + a } - `; - const queryB = gql` - query B { - info { - b - } + } + `; + const queryB = gql` + query B { + info { + b } - `; + } + `; - const obsA = client.watchQuery({ - query: queryA, - returnPartialData: true, - }); + const obsA = client.watchQuery({ + query: queryA, + returnPartialData: true, + }); - const obsB = client.watchQuery({ - query: queryB, - returnPartialData: true, - }); + const obsB = client.watchQuery({ + query: queryB, + returnPartialData: true, + }); - subscribeAndCount(reject, obsA, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: {}, - partial: true, - }); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - a: "ay", - }, - }, - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: { - info: {}, - }, - partial: true, - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - a: "ay", - }, - }, - }); - setTimeout(resolve, 100); - } else { - reject(new Error(`Unexpected ${JSON.stringify({ count, result })}`)); - } - }); + const aStream = new ObservableStream(obsA); + const bStream = new ObservableStream(obsB); - subscribeAndCount(reject, obsB, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: {}, - partial: true, - }); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - b: "bee", - }, - }, - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: { - info: {}, - }, - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - b: "bee", - }, - }, - }); - setTimeout(resolve, 100); - } else { - reject(new Error(`Unexpected ${JSON.stringify({ count, result })}`)); - } - }); - } - ); + await expect(aStream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); - itAsync( - "should disable feud-stopping logic after evict or modify", - (resolve, reject) => { - const cache = new InMemoryCache({ - typePolicies: { - Query: { - fields: { - info: { - merge: false, - }, + await expect(bStream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + await expect(aStream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + a: "ay", + }, + }, + }); + + await expect(bStream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + b: "bee", + }, + }, + }); + + await expect(aStream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: { + info: {}, + }, + partial: true, + }); + + await expect(aStream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + a: "ay", + }, + }, + }); + + await expect(aStream).not.toEmitAnything(); + await expect(bStream).not.toEmitAnything(); + }); + + it("should disable feud-stopping logic after evict or modify", async () => { + const cache = new InMemoryCache({ + 
typePolicies: { + Query: { + fields: { + info: { + merge: false, }, }, }, - }); + }, + }); - const client = new ApolloClient({ - cache, - link: new ApolloLink( - (operation) => - new Observable((observer: Observer) => { - observer.next!({ data: { info: { c: "see" } } }); - observer.complete!(); - }) - ), - }); + const client = new ApolloClient({ + cache, + link: new ApolloLink( + () => + new Observable((observer: Observer) => { + observer.next!({ data: { info: { c: "see" } } }); + observer.complete!(); + }) + ), + }); - const query = gql` - query { - info { - c - } + const query = gql` + query { + info { + c } - `; + } + `; - const obs = client.watchQuery({ - query, - returnPartialData: true, - }); + const obs = client.watchQuery({ + query, + returnPartialData: true, + }); - subscribeAndCount(reject, obs, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: {}, - partial: true, - }); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - c: "see", - }, - }, - }); + const stream = new ObservableStream(obs); - cache.evict({ - fieldName: "info", - }); - } else if (count === 3) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: {}, - partial: true, - }); - } else if (count === 4) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - c: "see", - }, - }, - }); + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); - cache.modify({ - fields: { - info(_, { DELETE }) { - return DELETE; - }, - }, - }); - } else if (count === 5) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - data: {}, - partial: true, - }); - } else if (count === 6) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - info: { - c: "see", - }, - }, - }); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); - setTimeout(resolve, 100); - } else { - reject(new Error(`Unexpected ${JSON.stringify({ count, result })}`)); - } - }); - } - ); + cache.evict({ fieldName: "info" }); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); + + cache.modify({ + fields: { + info(_, { DELETE }) { + return DELETE; + }, + }, + }); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); + + await expect(stream).not.toEmitAnything(); + }); itAsync( "should not error when replacing unidentified data with a normalized ID", @@ -2944,66 +2905,65 @@ describe("QueryManager", () => { } ); - itAsync( - 'only increments "queryInfo.lastRequestId" when fetching data from network', - (resolve, reject) => { - const query = gql` - query query($id: ID!) { - people_one(id: $id) { - name - } + it('only increments "queryInfo.lastRequestId" when fetching data from network', async () => { + const query = gql` + query query($id: ID!) 
{ + people_one(id: $id) { + name } - `; - const variables = { id: 1 }; - const dataOne = { - people_one: { - name: "Luke Skywalker", - }, - }; - const mockedResponses = [ - { - request: { query, variables }, - result: { data: dataOne }, - }, - ]; + } + `; + const variables = { id: 1 }; + const dataOne = { + people_one: { + name: "Luke Skywalker", + }, + }; + const mockedResponses = [ + { + request: { query, variables }, + result: { data: dataOne }, + }, + ]; - const queryManager = mockQueryManager(...mockedResponses); - const queryOptions: WatchQueryOptions = { - query, - variables, - fetchPolicy: "cache-and-network", - }; - const observable = queryManager.watchQuery(queryOptions); + const queryManager = mockQueryManager(...mockedResponses); + const queryOptions: WatchQueryOptions = { + query, + variables, + fetchPolicy: "cache-and-network", + }; + const observable = queryManager.watchQuery(queryOptions); - const mocks = mockFetchQuery(queryManager); - const queryId = "1"; - const getQuery: QueryManager["getQuery"] = ( - queryManager as any - ).getQuery.bind(queryManager); - - subscribeAndCount(reject, observable, async (handleCount) => { - const query = getQuery(queryId); - const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; - expect(query.lastRequestId).toEqual(1); - expect(fqbpCalls.length).toBe(1); - - // Simulate updating the options of the query, which will trigger - // fetchQueryByPolicy, but it should just read from cache and not - // update "queryInfo.lastRequestId". For more information, see - // https://github.com/apollographql/apollo-client/pull/7956#issue-610298427 - await observable.setOptions({ - ...queryOptions, - fetchPolicy: "cache-first", - }); + const mocks = mockFetchQuery(queryManager); + const queryId = "1"; + const getQuery: QueryManager["getQuery"] = ( + queryManager as any + ).getQuery.bind(queryManager); - // "fetchQueryByPolicy" was called, but "lastRequestId" does not update - // since it was able to read from cache. - expect(query.lastRequestId).toEqual(1); - expect(fqbpCalls.length).toBe(2); - resolve(); + const stream = new ObservableStream(observable); + + await expect(stream).toEmitNext(); + + { + const query = getQuery(queryId); + const fqbpCalls = mocks.fetchQueryByPolicy.mock.calls; + + expect(query.lastRequestId).toEqual(1); + expect(fqbpCalls.length).toBe(1); + + // Simulate updating the options of the query, which will trigger + // fetchQueryByPolicy, but it should just read from cache and not + // update "queryInfo.lastRequestId". For more information, see + // https://github.com/apollographql/apollo-client/pull/7956#issue-610298427 + await observable.setOptions({ + ...queryOptions, + fetchPolicy: "cache-first", }); + + expect(query.lastRequestId).toEqual(1); + expect(fqbpCalls.length).toBe(2); } - ); + }); describe("polling queries", () => { itAsync("allows you to poll queries", (resolve, reject) => { @@ -5196,92 +5156,86 @@ describe("QueryManager", () => { } ); - itAsync( - "also works with a query document and variables", - (resolve, reject) => { - const mutation = gql` - mutation changeAuthorName($id: ID!) { - changeAuthorName(newName: "Jack Smith", id: $id) { - firstName - lastName - } + it("also works with a query document and variables", async () => { + const mutation = gql` + mutation changeAuthorName($id: ID!) 
{ + changeAuthorName(newName: "Jack Smith", id: $id) { + firstName + lastName } - `; - const mutationData = { - changeAuthorName: { - firstName: "Jack", - lastName: "Smith", - }, - }; - const query = gql` - query getAuthors($id: ID!) { - author(id: $id) { - firstName - lastName - } + } + `; + const mutationData = { + changeAuthorName: { + firstName: "Jack", + lastName: "Smith", + }, + }; + const query = gql` + query getAuthors($id: ID!) { + author(id: $id) { + firstName + lastName } - `; - const data = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; - const secondReqData = { - author: { - firstName: "Jane", - lastName: "Johnson", - }, - }; + } + `; + const data = { + author: { + firstName: "John", + lastName: "Smith", + }, + }; + const secondReqData = { + author: { + firstName: "Jane", + lastName: "Johnson", + }, + }; - const variables = { id: "1234" }; - const mutationVariables = { id: "2345" }; - const queryManager = mockQueryManager( - { - request: { query, variables }, - result: { data }, - delay: 10, - }, - { - request: { query, variables }, - result: { data: secondReqData }, - delay: 100, - }, - { - request: { query: mutation, variables: mutationVariables }, - result: { data: mutationData }, - delay: 10, - } - ); - const observable = queryManager.watchQuery({ query, variables }); + const variables = { id: "1234" }; + const mutationVariables = { id: "2345" }; + const queryManager = mockQueryManager( + { + request: { query, variables }, + result: { data }, + delay: 10, + }, + { + request: { query, variables }, + result: { data: secondReqData }, + delay: 100, + }, + { + request: { query: mutation, variables: mutationVariables }, + result: { data: mutationData }, + delay: 10, + } + ); + const observable = queryManager.watchQuery({ query, variables }); + const stream = new ObservableStream(observable); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result.data).toEqual(data); - queryManager.mutate({ - mutation, - variables: mutationVariables, - refetchQueries: [{ query, variables }], - }); - } else if (count === 2) { - expect(result.data).toEqual(secondReqData); - expect(observable.getCurrentResult().data).toEqual(secondReqData); + await expect(stream).toEmitMatchedValue({ data }); - return new Promise((res) => setTimeout(res, 10)) - .then(() => { - // Make sure the QueryManager cleans up legacy one-time queries like - // the one we requested above using refetchQueries. 
- queryManager["queries"].forEach((queryInfo, queryId) => { - expect(queryId).not.toContain("legacyOneTimeQuery"); - }); - }) - .then(resolve, reject); - } else { - reject("too many results"); - } - }); - } - ); + await queryManager.mutate({ + mutation, + variables: mutationVariables, + refetchQueries: [{ query, variables }], + }); + + await expect(stream).toEmitMatchedValue( + { data: secondReqData }, + { timeout: 150 } + ); + expect(observable.getCurrentResult().data).toEqual(secondReqData); + + await wait(10); + + queryManager["queries"].forEach((_, queryId) => { + expect(queryId).not.toContain("legacyOneTimeQuery"); + }); + + await expect(stream).not.toEmitAnything(); + }); itAsync( "also works with a conditional function that returns false", diff --git a/src/core/__tests__/fetchPolicies.ts b/src/core/__tests__/fetchPolicies.ts index f5ed3743f4b..0208b6982c5 100644 --- a/src/core/__tests__/fetchPolicies.ts +++ b/src/core/__tests__/fetchPolicies.ts @@ -4,11 +4,12 @@ import { ApolloClient, NetworkStatus } from "../../core"; import { ApolloLink } from "../../link/core"; import { InMemoryCache } from "../../cache"; import { Observable } from "../../utilities"; -import { subscribeAndCount, itAsync, mockSingleLink } from "../../testing"; +import { itAsync, mockSingleLink } from "../../testing"; import { TypedDocumentNode } from "@graphql-typed-document-node/core"; import { WatchQueryFetchPolicy, WatchQueryOptions } from "../watchQueryOptions"; import { ApolloQueryResult } from "../types"; import { ObservableQuery } from "../ObservableQuery"; +import { ObservableStream, spyOnConsole } from "../../testing/internal"; const query = gql` query { @@ -431,234 +432,243 @@ describe("no-cache", () => { .then(resolve, reject); }); - itAsync( - "gives appropriate networkStatus for watched queries", - (resolve, reject) => { - const client = new ApolloClient({ - link: ApolloLink.empty(), - cache: new InMemoryCache(), - resolvers: { - Query: { - hero(_data, args) { - return { - __typename: "Hero", - ...args, - name: "Luke Skywalker", - }; - }, + it("gives appropriate networkStatus for watched queries", async () => { + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + resolvers: { + Query: { + hero(_data, args) { + return { + __typename: "Hero", + ...args, + name: "Luke Skywalker", + }; }, }, - }); + }, + }); - const observable = client.watchQuery({ - query: gql` - query FetchLuke($id: String) { - hero(id: $id) @client { - id - name - } + const observable = client.watchQuery({ + query: gql` + query FetchLuke($id: String) { + hero(id: $id) @client { + id + name } - `, - fetchPolicy: "no-cache", - variables: { id: "1" }, - notifyOnNetworkStatusChange: true, - }); + } + `, + fetchPolicy: "no-cache", + variables: { id: "1" }, + notifyOnNetworkStatusChange: true, + }); - function dataWithId(id: number | string) { - return { - hero: { - __typename: "Hero", - id: String(id), - name: "Luke Skywalker", - }, - }; - } + const stream = new ObservableStream(observable); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - data: dataWithId(1), - loading: false, - networkStatus: NetworkStatus.ready, - }); - expect(client.cache.extract(true)).toEqual({}); - return observable.setVariables({ id: "2" }); - } else if (count === 2) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.setVariables, - partial: true, - }); - } else if (count === 3) { - expect(result).toEqual({ - data: dataWithId(2), - 
loading: false, - networkStatus: NetworkStatus.ready, - }); - expect(client.cache.extract(true)).toEqual({}); - return observable.refetch(); - } else if (count === 4) { - expect(result).toEqual({ - data: dataWithId(2), - loading: true, - networkStatus: NetworkStatus.refetch, - }); - expect(client.cache.extract(true)).toEqual({}); - } else if (count === 5) { - expect(result).toEqual({ - data: dataWithId(2), - loading: false, - networkStatus: NetworkStatus.ready, - }); - expect(client.cache.extract(true)).toEqual({}); - return observable.refetch({ id: "3" }); - } else if (count === 6) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.setVariables, - partial: true, - }); - expect(client.cache.extract(true)).toEqual({}); - } else if (count === 7) { - expect(result).toEqual({ - data: dataWithId(3), - loading: false, - networkStatus: NetworkStatus.ready, - }); - expect(client.cache.extract(true)).toEqual({}); - resolve(); - } - }); + function dataWithId(id: number | string) { + return { + hero: { + __typename: "Hero", + id: String(id), + name: "Luke Skywalker", + }, + }; } - ); + + await expect(stream).toEmitValue({ + data: dataWithId(1), + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(client.cache.extract(true)).toEqual({}); + + await observable.setVariables({ id: "2" }); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.setVariables, + partial: true, + }); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(client.cache.extract(true)).toEqual({}); + + await observable.refetch(); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: true, + networkStatus: NetworkStatus.refetch, + }); + expect(client.cache.extract(true)).toEqual({}); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(client.cache.extract(true)).toEqual({}); + + await observable.refetch({ id: "3" }); + + await expect(stream).toEmitValue({ + loading: true, + networkStatus: NetworkStatus.setVariables, + partial: true, + }); + expect(client.cache.extract(true)).toEqual({}); + + await expect(stream).toEmitValue({ + data: dataWithId(3), + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(client.cache.extract(true)).toEqual({}); + + await expect(stream).not.toEmitAnything(); + }); }); }); describe("cache-first", () => { - itAsync.skip( - "does not trigger network request during optimistic update", - (resolve, reject) => { - const results: any[] = []; - const client = new ApolloClient({ - link: new ApolloLink((operation, forward) => { - return forward(operation).map((result) => { - results.push(result); - return result; - }); - }).concat(createMutationLink(reject)), - cache: new InMemoryCache(), - }); + it("does not trigger network request during optimistic update", async () => { + const results: any[] = []; + const client = new ApolloClient({ + link: new ApolloLink((operation, forward) => { + return forward(operation).map((result) => { + results.push(result); + return result; + }); + }).concat( + createMutationLink((error) => { + throw error; + }) + ), + cache: new InMemoryCache(), + }); - let inOptimisticTransaction = false; + let inOptimisticTransaction = false; - subscribeAndCount( - reject, - client.watchQuery({ - query, - fetchPolicy: "cache-and-network", - returnPartialData: true, - }), - (count, { data, loading, networkStatus }) => { - if (count 
=== 1) { - expect(results.length).toBe(0); - expect(loading).toBe(true); - expect(networkStatus).toBe(NetworkStatus.loading); - expect(data).toEqual({}); - } else if (count === 2) { - expect(results.length).toBe(1); - expect(loading).toBe(false); - expect(networkStatus).toBe(NetworkStatus.ready); - expect(data).toEqual({ - author: { - __typename: "Author", - id: 1, - firstName: "John", - lastName: "Smith", - }, - }); + const stream = new ObservableStream( + client.watchQuery({ + query, + fetchPolicy: "cache-and-network", + returnPartialData: true, + }) + ); - inOptimisticTransaction = true; - client.cache.recordOptimisticTransaction((cache) => { - cache.writeQuery({ - query, - data: { - author: { - __typename: "Bogus", - }, - }, - }); - }, "bogus"); - } else if (count === 3) { - expect(results.length).toBe(1); - expect(loading).toBe(false); - expect(networkStatus).toBe(NetworkStatus.ready); - expect(data).toEqual({ - author: { - __typename: "Bogus", - }, - }); + await expect(stream).toEmitValue({ + data: {}, + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + expect(results).toHaveLength(0); + + await expect(stream).toEmitValue({ + data: { + author: { + __typename: "Author", + id: 1, + firstName: "John", + lastName: "Smith", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(results).toHaveLength(1); + + inOptimisticTransaction = true; + client.cache.recordOptimisticTransaction((cache) => { + // Silence partial data write error + using _ = spyOnConsole("error"); + cache.writeQuery({ + query, + data: { + author: { + __typename: "Bogus", + }, + }, + }); + }, "bogus"); - setTimeout(() => { - inOptimisticTransaction = false; - client.cache.removeOptimistic("bogus"); - }, 100); - } else if (count === 4) { - // A network request should not be triggered until after the bogus - // optimistic transaction has been removed. - expect(inOptimisticTransaction).toBe(false); - expect(results.length).toBe(1); - expect(loading).toBe(false); - expect(networkStatus).toBe(NetworkStatus.ready); - expect(data).toEqual({ - author: { - __typename: "Author", - id: 1, - firstName: "John", - lastName: "Smith", - }, - }); + await expect(stream).toEmitValue({ + data: { + author: { + __typename: "Bogus", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: true, + }); + expect(results).toHaveLength(1); + + setTimeout(() => { + inOptimisticTransaction = false; + client.cache.removeOptimistic("bogus"); + }, 50); + + await expect(stream).toEmitValue({ + data: { + author: { + __typename: "Author", + id: 1, + firstName: "John", + lastName: "Smith", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + // A network request should not be triggered until after the bogus + // optimistic transaction has been removed. 
+ expect(inOptimisticTransaction).toBe(false); + expect(results).toHaveLength(1); - client.cache.writeQuery({ - query, - data: { - author: { - __typename: "Author", - id: 2, - firstName: "Chinua", - lastName: "Achebe", - }, - }, - }); - } else if (count === 5) { - expect(inOptimisticTransaction).toBe(false); - expect(results.length).toBe(1); - expect(loading).toBe(false); - expect(networkStatus).toBe(NetworkStatus.ready); - expect(data).toEqual({ - author: { - __typename: "Author", - id: 2, - firstName: "Chinua", - lastName: "Achebe", - }, - }); - setTimeout(resolve, 100); - } else { - reject(new Error("unreached")); - } - } - ); - } - ); + client.cache.writeQuery({ + query, + data: { + author: { + __typename: "Author", + id: 2, + firstName: "Chinua", + lastName: "Achebe", + }, + }, + }); + + await expect(stream).toEmitValue({ + data: { + author: { + __typename: "Author", + id: 2, + firstName: "Chinua", + lastName: "Achebe", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + expect(inOptimisticTransaction).toBe(false); + expect(results).toHaveLength(1); + + await expect(stream).not.toEmitAnything(); + }); }); describe("cache-only", () => { - itAsync("allows explicit refetch to happen", (resolve, reject) => { + it("allows explicit refetch to happen", async () => { let counter = 0; const client = new ApolloClient({ cache: new InMemoryCache(), link: new ApolloLink( - (operation) => + () => new Observable((observer) => { observer.next({ data: { @@ -681,135 +691,129 @@ describe("cache-only", () => { nextFetchPolicy: "cache-only", }); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - count: 1, - }, - }); - - expect(observable.options.fetchPolicy).toBe("cache-only"); + const stream = new ObservableStream(observable); - observable.refetch().catch(reject); - } else if (count === 2) { - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data: { - count: 2, - }, - }); + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + count: 1, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-only"); - expect(observable.options.fetchPolicy).toBe("cache-only"); + await observable.refetch(); - setTimeout(resolve, 50); - } else { - reject(`too many results (${count})`); - } + await expect(stream).toEmitValue({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + count: 2, + }, }); + + expect(observable.options.fetchPolicy).toBe("cache-only"); + + await expect(stream).not.toEmitAnything(); }); }); describe("cache-and-network", function () { - itAsync( - "gives appropriate networkStatus for refetched queries", - (resolve, reject) => { - const client = new ApolloClient({ - link: ApolloLink.empty(), - cache: new InMemoryCache(), - resolvers: { - Query: { - hero(_data, args) { - return { - __typename: "Hero", - ...args, - name: "Luke Skywalker", - }; - }, + it("gives appropriate networkStatus for refetched queries", async () => { + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + resolvers: { + Query: { + hero(_data, args) { + return { + __typename: "Hero", + ...args, + name: "Luke Skywalker", + }; }, }, - }); + }, + }); - const observable = client.watchQuery({ - query: gql` - query FetchLuke($id: String) { - hero(id: $id) @client { - id - name - } + const observable = client.watchQuery({ + query: gql` + query 
FetchLuke($id: String) { + hero(id: $id) @client { + id + name } - `, - fetchPolicy: "cache-and-network", - variables: { id: "1" }, - notifyOnNetworkStatusChange: true, - }); + } + `, + fetchPolicy: "cache-and-network", + variables: { id: "1" }, + notifyOnNetworkStatusChange: true, + }); - function dataWithId(id: number | string) { - return { - hero: { - __typename: "Hero", - id: String(id), - name: "Luke Skywalker", - }, - }; - } + const stream = new ObservableStream(observable); - subscribeAndCount(reject, observable, (count, result) => { - if (count === 1) { - expect(result).toEqual({ - data: dataWithId(1), - loading: false, - networkStatus: NetworkStatus.ready, - }); - return observable.setVariables({ id: "2" }); - } else if (count === 2) { - expect(result).toEqual({ - data: {}, - loading: true, - networkStatus: NetworkStatus.setVariables, - partial: true, - }); - } else if (count === 3) { - expect(result).toEqual({ - data: dataWithId(2), - loading: false, - networkStatus: NetworkStatus.ready, - }); - return observable.refetch(); - } else if (count === 4) { - expect(result).toEqual({ - data: dataWithId(2), - loading: true, - networkStatus: NetworkStatus.refetch, - }); - } else if (count === 5) { - expect(result).toEqual({ - data: dataWithId(2), - loading: false, - networkStatus: NetworkStatus.ready, - }); - return observable.refetch({ id: "3" }); - } else if (count === 6) { - expect(result).toEqual({ - data: {}, - loading: true, - networkStatus: NetworkStatus.setVariables, - partial: true, - }); - } else if (count === 7) { - expect(result).toEqual({ - data: dataWithId(3), - loading: false, - networkStatus: NetworkStatus.ready, - }); - resolve(); - } - }); + function dataWithId(id: number | string) { + return { + hero: { + __typename: "Hero", + id: String(id), + name: "Luke Skywalker", + }, + }; } - ); + + await expect(stream).toEmitValue({ + data: dataWithId(1), + loading: false, + networkStatus: NetworkStatus.ready, + }); + + await observable.setVariables({ id: "2" }); + + await expect(stream).toEmitValue({ + data: {}, + loading: true, + networkStatus: NetworkStatus.setVariables, + partial: true, + }); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: false, + networkStatus: NetworkStatus.ready, + }); + + await observable.refetch(); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: true, + networkStatus: NetworkStatus.refetch, + }); + + await expect(stream).toEmitValue({ + data: dataWithId(2), + loading: false, + networkStatus: NetworkStatus.ready, + }); + + await observable.refetch({ id: "3" }); + + await expect(stream).toEmitValue({ + data: {}, + loading: true, + networkStatus: NetworkStatus.setVariables, + partial: true, + }); + + await expect(stream).toEmitValue({ + data: dataWithId(3), + loading: false, + networkStatus: NetworkStatus.ready, + }); + + await expect(stream).not.toEmitAnything(); + }); }); describe("nextFetchPolicy", () => { @@ -861,102 +865,77 @@ describe("nextFetchPolicy", () => { fetchPolicy: WatchQueryFetchPolicy; nextFetchPolicy: WatchQueryOptions<{}, TData>["nextFetchPolicy"]; useDefaultOptions: boolean; - onResult(info: { - count: number; - result: ApolloQueryResult; + checkResult: (info: { + stream: ObservableStream>; observable: ObservableQuery; - resolve(result?: any): void; - reject(reason?: any): void; - }): void; + }) => Promise; }) => - itAsync( - `transitions ${args.fetchPolicy} to ${ - typeof args.nextFetchPolicy === "function" ? 
- args.nextFetchPolicy.name - : args.nextFetchPolicy - } (${args.useDefaultOptions ? "" : "not "}using defaults)`, - (resolve, reject) => { - const client = new ApolloClient({ - link: makeLink(), - cache: new InMemoryCache({ - addTypename: true, - }), - defaultOptions: { - watchQuery: - args.useDefaultOptions ? - { - nextFetchPolicy: args.nextFetchPolicy, - } - : {}, - }, - }); + it(`transitions ${args.fetchPolicy} to ${ + typeof args.nextFetchPolicy === "function" ? + args.nextFetchPolicy.name + : args.nextFetchPolicy + } (${args.useDefaultOptions ? "" : "not "}using defaults)`, async () => { + const client = new ApolloClient({ + link: makeLink(), + cache: new InMemoryCache({ + addTypename: true, + }), + defaultOptions: { + watchQuery: + args.useDefaultOptions ? + { + nextFetchPolicy: args.nextFetchPolicy, + } + : {}, + }, + }); - const watchQueryOptions: WatchQueryOptions = { - query: EchoQuery, - fetchPolicy: args.fetchPolicy, - }; + const watchQueryOptions: WatchQueryOptions = { + query: EchoQuery, + fetchPolicy: args.fetchPolicy, + }; - if (!args.useDefaultOptions) { - watchQueryOptions.nextFetchPolicy = args.nextFetchPolicy; - } + if (!args.useDefaultOptions) { + watchQueryOptions.nextFetchPolicy = args.nextFetchPolicy; + } - const observable = client.watchQuery(watchQueryOptions); + const observable = client.watchQuery(watchQueryOptions); - expect(observable.options.fetchPolicy).toBe(args.fetchPolicy); + expect(observable.options.fetchPolicy).toBe(args.fetchPolicy); - subscribeAndCount(reject, observable, (count, result) => { - return args.onResult({ - observable, - count, - result, - resolve, - reject, - }); - }); - } - ); + await args.checkResult({ + observable, + stream: new ObservableStream(observable), + }); + }); type CheckOptions = Parameters[0]; type NextFetchPolicy = CheckOptions["nextFetchPolicy"]; - type OnResultCallback = CheckOptions["onResult"]; + type CheckResultCallback = CheckOptions["checkResult"]; // We'll use this same OnResultCallback for multiple tests, to make it easier // to tell that the behavior of the tests is the same. 
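+ // A minimal sketch of the general shape of these shared callbacks, assuming an
+ // ObservableQuery (`observable`) wrapped in an ObservableStream (`stream`);
+ // `checkResult` below is a placeholder name, and the real callbacks also
+ // assert on the echoed link data:
+ //
+ //   const checkResult: CheckResultCallback = async ({ observable, stream }) => {
+ //     await expect(stream).toEmitMatchedValue({ loading: false });
+ //     await observable.refetch({ refetching: true });
+ //     await expect(stream).toEmitMatchedValue({ loading: false });
+ //     expect(observable.options.fetchPolicy).toBe("cache-first");
+ //     await expect(stream).not.toEmitAnything();
+ //   };
+ //
+ // Note that toEmitMatchedValue performs a subset match on the emitted result
+ // (it is implemented with subsetEquality below), so only the listed fields
+ // are pinned down.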
- const onResultNetworkOnlyToCacheFirst: OnResultCallback = ({ + const onResultNetworkOnlyToCacheFirst: CheckResultCallback = async ({ observable, - count, - result, - resolve, - reject, + stream, }) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 1, - opName: "EchoQuery", - opVars: {}, - }); + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }, + }, + }); - expect(observable.options.fetchPolicy).toBe("cache-first"); + expect(observable.options.fetchPolicy).toBe("cache-first"); + + { + const result = await observable.refetch({ refetching: true }); - observable - .refetch({ - refetching: true, - }) - .then((result) => { - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 2, - opName: "EchoQuery", - opVars: { - refetching: true, - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", linkCounter: 2, @@ -965,31 +944,31 @@ describe("nextFetchPolicy", () => { refetching: true, }, }); + } - expect(observable.options.fetchPolicy).toBe("cache-first"); - - observable - .reobserve({ - variables: { - refetching: false, + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, }, - }) - .then((result) => { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 3, - opName: "EchoQuery", - opVars: { - refetching: false, - }, - }); - }) - .catch(reject); + }, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + { + const result = await observable.reobserve({ + variables: { + refetching: false, + }, + }); - // Changing variables resets the fetchPolicy to its initial value. - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (count === 3) { expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", @@ -1000,27 +979,42 @@ describe("nextFetchPolicy", () => { }, }); - // But nextFetchPolicy is applied again after the first request. + // Changing variables resets the fetchPolicy to its initial value. expect(observable.options.fetchPolicy).toBe("cache-first"); - - setTimeout(resolve, 20); - } else { - reject(`Too many results (${count})`); } + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }, + }, + }); + + // But nextFetchPolicy is applied again after the first request. 
+ expect(observable.options.fetchPolicy).toBe("cache-first"); + + await expect(stream).not.toEmitAnything(); }; checkNextFetchPolicy({ useDefaultOptions: false, fetchPolicy: "network-only", nextFetchPolicy: "cache-first", - onResult: onResultNetworkOnlyToCacheFirst, + checkResult: onResultNetworkOnlyToCacheFirst, }); checkNextFetchPolicy({ useDefaultOptions: true, fetchPolicy: "network-only", nextFetchPolicy: "cache-first", - onResult: onResultNetworkOnlyToCacheFirst, + checkResult: onResultNetworkOnlyToCacheFirst, }); const nextFetchPolicyNetworkOnlyToCacheFirst: NextFetchPolicy = function ( @@ -1044,51 +1038,36 @@ describe("nextFetchPolicy", () => { useDefaultOptions: false, fetchPolicy: "network-only", nextFetchPolicy: nextFetchPolicyNetworkOnlyToCacheFirst, - onResult: onResultNetworkOnlyToCacheFirst, + checkResult: onResultNetworkOnlyToCacheFirst, }); checkNextFetchPolicy({ useDefaultOptions: true, fetchPolicy: "network-only", nextFetchPolicy: nextFetchPolicyNetworkOnlyToCacheFirst, - onResult: onResultNetworkOnlyToCacheFirst, + checkResult: onResultNetworkOnlyToCacheFirst, }); - const onResultCacheAndNetworkToCacheFirst: OnResultCallback = ({ + const onResultCacheAndNetworkToCacheFirst: CheckResultCallback = async ({ observable, - count, - result, - resolve, - reject, + stream, }) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 1, - opName: "EchoQuery", - opVars: {}, - }); + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); - expect(observable.options.fetchPolicy).toBe("cache-first"); + { + const result = await observable.refetch({ refetching: true }); - observable - .refetch({ - refetching: true, - }) - .then((result) => { - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 2, - opName: "EchoQuery", - opVars: { - refetching: true, - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", linkCounter: 2, @@ -1097,44 +1076,32 @@ describe("nextFetchPolicy", () => { refetching: true, }, }); + } - expect(observable.options.fetchPolicy).toBe("cache-first"); - - observable - .reobserve({ - variables: { - refetching: false, + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, }, - }) - .then((result) => { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 3, - opName: "EchoQuery", - opVars: { - refetching: false, - }, - }); - }) - .catch(reject); + }, + }, + }); + // Changing variables resets the fetchPolicy to its initial value. + // expect(observable.options.fetchPolicy).toBe("cache-and-network"); + expect(observable.options.fetchPolicy).toBe("cache-first"); - // Changing variables resets the fetchPolicy to its initial value. 
- // expect(observable.options.fetchPolicy).toBe("cache-and-network"); - } else if (count === 3) { - expect(result.loading).toBe(true); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 2, - opName: "EchoQuery", - opVars: { - refetching: true, + { + const result = await observable.reobserve({ + variables: { + refetching: false, }, }); - // But nextFetchPolicy is applied again after the first request. - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (count === 4) { expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", @@ -1144,27 +1111,54 @@ describe("nextFetchPolicy", () => { refetching: false, }, }); + } - expect(observable.options.fetchPolicy).toBe("cache-first"); + await expect(stream).toEmitMatchedValue({ + loading: true, + data: { + echo: { + __typename: "Echo", + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }, + }, + }); + // But nextFetchPolicy is applied again after the first request. + expect(observable.options.fetchPolicy).toBe("cache-first"); + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); - setTimeout(resolve, 20); - } else { - reject(`Too many results (${count})`); - } + await expect(stream).not.toEmitAnything(); }; checkNextFetchPolicy({ useDefaultOptions: false, fetchPolicy: "cache-and-network", nextFetchPolicy: "cache-first", - onResult: onResultCacheAndNetworkToCacheFirst, + checkResult: onResultCacheAndNetworkToCacheFirst, }); checkNextFetchPolicy({ useDefaultOptions: true, fetchPolicy: "cache-and-network", nextFetchPolicy: "cache-first", - onResult: onResultCacheAndNetworkToCacheFirst, + checkResult: onResultCacheAndNetworkToCacheFirst, }); const nextFetchPolicyCacheAndNetworkToCacheFirst: NextFetchPolicy = function ( @@ -1188,14 +1182,14 @@ describe("nextFetchPolicy", () => { useDefaultOptions: false, fetchPolicy: "cache-and-network", nextFetchPolicy: nextFetchPolicyCacheAndNetworkToCacheFirst, - onResult: onResultCacheAndNetworkToCacheFirst, + checkResult: onResultCacheAndNetworkToCacheFirst, }); checkNextFetchPolicy({ useDefaultOptions: true, fetchPolicy: "cache-and-network", nextFetchPolicy: nextFetchPolicyCacheAndNetworkToCacheFirst, - onResult: onResultCacheAndNetworkToCacheFirst, + checkResult: onResultCacheAndNetworkToCacheFirst, }); const nextFetchPolicyAlwaysCacheFirst: NextFetchPolicy = function ( @@ -1207,41 +1201,26 @@ describe("nextFetchPolicy", () => { return "cache-first"; }; - const onResultCacheAndNetworkAlwaysCacheFirst: OnResultCallback = ({ + const onResultCacheAndNetworkAlwaysCacheFirst: CheckResultCallback = async ({ observable, - count, - result, - resolve, - reject, + stream, }) => { - if (count === 1) { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 1, - opName: "EchoQuery", - opVars: {}, - }); + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); - expect(observable.options.fetchPolicy).toBe("cache-first"); + { + const result = await observable.refetch({ refetching: true }); - observable - .refetch({ - refetching: true, - }) - .then((result) => { - 
expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 2, - opName: "EchoQuery", - opVars: { - refetching: true, - }, - }); - }) - .catch(reject); - } else if (count === 2) { - expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", linkCounter: 2, @@ -1250,34 +1229,30 @@ describe("nextFetchPolicy", () => { refetching: true, }, }); + } - expect(observable.options.fetchPolicy).toBe("cache-first"); - - observable - .reobserve({ - variables: { - refetching: false, + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, }, - }) - .then((result) => { - expect(result.loading).toBe(false); - expect(result.data.echo).toEqual({ - __typename: "Echo", - linkCounter: 2, - opName: "EchoQuery", - opVars: { - refetching: true, - }, - }); - }) - .catch(reject); + }, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); + + { + const result = await observable.reobserve({ + variables: { + refetching: false, + }, + }); - // The nextFetchPolicy function we provided always returnes cache-first, - // even when context.reason is variables-changed (which by default - // resets the fetchPolicy to context.initialFetchPolicy), so cache-first is - // still what we see here. - expect(observable.options.fetchPolicy).toBe("cache-first"); - } else if (count === 3) { expect(result.loading).toBe(false); expect(result.data.echo).toEqual({ __typename: "Echo", @@ -1288,24 +1263,42 @@ describe("nextFetchPolicy", () => { }, }); + // The nextFetchPolicy function we provided always returns cache-first, + // even when context.reason is variables-changed (which by default + // resets the fetchPolicy to context.initialFetchPolicy), so cache-first is + // still what we see here.
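+ // For contrast, a nextFetchPolicy function that opts back into that default
+ // reset might look roughly like the sketch below (assuming the usual
+ // (currentFetchPolicy, context) signature; `resetOnVariablesChange` is an
+ // illustrative name, not something these tests define):
+ //
+ //   const resetOnVariablesChange: NextFetchPolicy = function (
+ //     currentFetchPolicy,
+ //     context
+ //   ) {
+ //     if (context.reason === "variables-changed") {
+ //       return context.initialFetchPolicy;
+ //     }
+ //     return "cache-first";
+ //   };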
expect(observable.options.fetchPolicy).toBe("cache-first"); - setTimeout(resolve, 20); - } else { - reject(`Too many results (${count})`); } + + await expect(stream).toEmitMatchedValue({ + loading: false, + data: { + echo: { + __typename: "Echo", + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); + + await expect(stream).not.toEmitAnything(); }; checkNextFetchPolicy({ useDefaultOptions: false, fetchPolicy: "cache-and-network", nextFetchPolicy: nextFetchPolicyAlwaysCacheFirst, - onResult: onResultCacheAndNetworkAlwaysCacheFirst, + checkResult: onResultCacheAndNetworkAlwaysCacheFirst, }); checkNextFetchPolicy({ useDefaultOptions: true, fetchPolicy: "cache-and-network", nextFetchPolicy: nextFetchPolicyAlwaysCacheFirst, - onResult: onResultCacheAndNetworkAlwaysCacheFirst, + checkResult: onResultCacheAndNetworkAlwaysCacheFirst, }); }); diff --git a/src/link/http/__tests__/responseIterator.ts b/src/link/http/__tests__/responseIterator.ts index dca254c9821..bbebcad3c0f 100644 --- a/src/link/http/__tests__/responseIterator.ts +++ b/src/link/http/__tests__/responseIterator.ts @@ -1,31 +1,13 @@ import gql from "graphql-tag"; import { execute } from "../../core/execute"; import { HttpLink } from "../HttpLink"; -import { itAsync, subscribeAndCount } from "../../../testing"; -import type { Observable } from "zen-observable-ts"; -import { ObservableQuery } from "../../../core"; import { TextEncoder, TextDecoder } from "util"; import { ReadableStream } from "web-streams-polyfill"; import { Readable } from "stream"; +import { ObservableStream } from "../../../testing/internal"; var Blob = require("blob-polyfill").Blob; -function makeCallback( - resolve: () => void, - reject: (error: Error) => void, - callback: (...args: TArgs) => any -) { - return function () { - try { - // @ts-expect-error - callback.apply(this, arguments); - resolve(); - } catch (error) { - reject(error as Error); - } - } as typeof callback; -} - const sampleDeferredQuery = gql` query SampleDeferredQuery { stub { @@ -39,32 +21,6 @@ const sampleDeferredQuery = gql` const BOUNDARY = "gc0p4Jq0M2Yt08jU534c0p"; -function matchesResults( - resolve: () => void, - reject: (err: any) => void, - observable: Observable, - results: Array -) { - // TODO: adding a second observer to the observable will consume the - // observable. I want to test completion, but the subscribeAndCount API - // doesn’t have anything like that. - subscribeAndCount( - reject, - observable as unknown as ObservableQuery, - (count, result) => { - // subscribeAndCount is 1-indexed for some terrible reason. 
- if (0 >= count || count > results.length) { - reject(new Error("Unexpected result")); - } - - expect(result).toEqual(results[count - 1]); - if (count === results.length) { - resolve(); - } - } - ); -} - describe("multipart responses", () => { let originalTextDecoder: any; beforeAll(() => { @@ -203,7 +159,7 @@ describe("multipart responses", () => { }, ]; - itAsync("can handle whatwg stream bodies", (resolve, reject) => { + it("can handle whatwg stream bodies", async () => { const stream = new ReadableStream({ async start(controller) { const lines = bodyCustomBoundary.split("\r\n"); @@ -230,150 +186,171 @@ describe("multipart responses", () => { }); const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); }); - itAsync( - "can handle whatwg stream bodies with arbitrary splits", - (resolve, reject) => { - const stream = new ReadableStream({ - async start(controller) { - let chunks: Array = []; - let chunkSize = 15; - for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { - chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); - } + it("can handle whatwg stream bodies with arbitrary splits", async () => { + const stream = new ReadableStream({ + async start(controller) { + let chunks: Array = []; + let chunkSize = 15; + for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { + chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); + } - try { - for (const chunk of chunks) { - controller.enqueue(chunk); - } - } finally { - controller.close(); + try { + for (const chunk of chunks) { + controller.enqueue(chunk); } - }, - }); + } finally { + controller.close(); + } + }, + }); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - headers: new Headers({ - "content-type": `multipart/mixed; boundary=${BOUNDARY}`, - }), - })); + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); - const link = new HttpLink({ - fetch: fetch as any, - }); + const link = new HttpLink({ + fetch: fetch as any, + }); - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - itAsync( - "can handle node stream bodies (strings) with default boundary", - (resolve, reject) => { - const stream = Readable.from( - bodyDefaultBoundary.split("\r\n").map((line) => line + "\r\n") - ); + await expect(observableStream).toComplete(); + }); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "content-type": `multipart/mixed`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); + it("can handle node stream bodies (strings) with default boundary", async () => { + const stream = Readable.from( + bodyDefaultBoundary.split("\r\n").map((line) => line + "\r\n") + ); - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const fetch = jest.fn(async () => ({ + 
status: 200, + body: stream, + // if no boundary is specified, default to - + headers: new Headers({ + "content-type": `multipart/mixed`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - - itAsync( - "can handle node stream bodies (strings) with arbitrary splits", - (resolve, reject) => { - let chunks: Array = []; - let chunkSize = 15; - for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { - chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); - } - const stream = Readable.from(chunks); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - headers: new Headers({ - "content-type": `multipart/mixed; boundary=${BOUNDARY}`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); + await expect(observableStream).toComplete(); + }); - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + it("can handle node stream bodies (strings) with arbitrary splits", async () => { + let chunks: Array = []; + let chunkSize = 15; + for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { + chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); } - ); + const stream = Readable.from(chunks); - itAsync( - "can handle node stream bodies (array buffers)", - (resolve, reject) => { - const stream = Readable.from( - bodyDefaultBoundary - .split("\r\n") - .map((line) => new TextEncoder().encode(line + "\r\n")) - ); + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "content-type": `multipart/mixed`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - itAsync( - "can handle node stream bodies (array buffers) with batched results", - (resolve, reject) => { - const stream = Readable.from( - bodyBatchedResults - .split("\r\n") - .map((line) => new TextEncoder().encode(line + "\r\n")) - ); + await expect(observableStream).toComplete(); + }); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "content-type": `multipart/mixed;boundary="graphql";deferSpec=20220824`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); + it("can handle node stream bodies (array buffers)", async () => { + const stream = Readable.from( + bodyDefaultBoundary + .split("\r\n") + .map((line) => new TextEncoder().encode(line + "\r\n")) + ); - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, batchedResults); + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + // if no boundary is specified, default to - + 
headers: new Headers({ + "content-type": `multipart/mixed`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - itAsync("can handle streamable blob bodies", (resolve, reject) => { + await expect(observableStream).toComplete(); + }); + + it("can handle node stream bodies (array buffers) with batched results", async () => { + const stream = Readable.from( + bodyBatchedResults + .split("\r\n") + .map((line) => new TextEncoder().encode(line + "\r\n")) + ); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + // if no boundary is specified, default to - + headers: new Headers({ + "content-type": `multipart/mixed;boundary="graphql";deferSpec=20220824`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of batchedResults) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); + }); + + it("can handle streamable blob bodies", async () => { const body = new Blob(bodyCustomBoundary.split("\r\n"), { type: "application/text", }); @@ -402,10 +379,16 @@ describe("multipart responses", () => { }); const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); }); - itAsync("can handle non-streamable blob bodies", (resolve, reject) => { + it("can handle non-streamable blob bodies", async () => { const body = new Blob( bodyCustomBoundary.split("\r\n").map((i) => i + "\r\n"), { type: "application/text" } @@ -424,10 +407,16 @@ describe("multipart responses", () => { }); const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); }); - itAsync("throws error on non-streamable body", (resolve, reject) => { + it("throws error on non-streamable body", async () => { // non-streamable body const body = 12345; const fetch = jest.fn(async () => ({ @@ -447,39 +436,36 @@ describe("multipart responses", () => { ), }; - observable.subscribe( - () => reject("next should not have been called"), - makeCallback(resolve, reject, (error) => { - expect(error).toEqual(mockError.throws); - }), - () => reject("complete should not have been called") - ); + const observableStream = new ObservableStream(observable); + + await expect(observableStream).toEmitError(mockError.throws); }); // test is still failing as observer.complete is called even after error is thrown - // itAsync('throws error on unsupported patch content type', (resolve, reject) => { - // const stream = Readable.from( - // bodyIncorrectChunkType.split("\r\n").map((line) => line + "\r\n") - // ); - // const fetch = jest.fn(async () => ({ - // status: 200, - // body: stream, - // headers: new Headers({ "content-type": `multipart/mixed; boundary=${BOUNDARY}` }), 
- // })); - // const link = new HttpLink({ - // fetch: fetch as any, - // }); - // const observable = execute(link, { query: sampleDeferredQuery }); - // const mockError = { throws: new Error('Unsupported patch content type: application/json is required') }; - - // observable.subscribe( - // () => reject('next should not have been called'), - // makeCallback(resolve, reject, (error) => { - // expect(error).toEqual(mockError.throws); - // }), - // () => reject('complete should not have been called'), - // ); - // }); + it.failing("throws error on unsupported patch content type", async () => { + const stream = Readable.from( + bodyIncorrectChunkType.split("\r\n").map((line) => line + "\r\n") + ); + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + const observable = execute(link, { query: sampleDeferredQuery }); + const mockError = { + throws: new Error( + "Unsupported patch content type: application/json is required" + ), + }; + const observableStream = new ObservableStream(observable); + + await expect(observableStream).toEmitError(mockError.throws); + }); describe("without TextDecoder defined in the environment", () => { beforeAll(() => { @@ -491,37 +477,30 @@ describe("multipart responses", () => { globalThis.TextDecoder = originalTextDecoder; }); - itAsync( - "throws error if TextDecoder not defined in the environment", - (resolve, reject) => { - const stream = Readable.from( - bodyIncorrectChunkType.split("\r\n").map((line) => line + "\r\n") - ); - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - headers: new Headers({ - "content-type": `multipart/mixed; boundary=${BOUNDARY}`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); - const observable = execute(link, { query: sampleDeferredQuery }); - const mockError = { - throws: new Error( - "TextDecoder must be defined in the environment: please import a polyfill." - ), - }; - - observable.subscribe( - () => reject("next should not have been called"), - makeCallback(resolve, reject, (error) => { - expect(error).toEqual(mockError.throws); - }), - () => reject("complete should not have been called") - ); - } - ); + it("throws error if TextDecoder not defined in the environment", async () => { + const stream = Readable.from( + bodyIncorrectChunkType.split("\r\n").map((line) => line + "\r\n") + ); + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + const observable = execute(link, { query: sampleDeferredQuery }); + const mockError = { + throws: new Error( + "TextDecoder must be defined in the environment: please import a polyfill." 
+ ), + }; + + const observableStream = new ObservableStream(observable); + + await expect(observableStream).toEmitError(mockError.throws); + }); }); }); diff --git a/src/link/http/__tests__/responseIteratorNoAsyncIterator.ts b/src/link/http/__tests__/responseIteratorNoAsyncIterator.ts index 312823cf099..4b3c1268753 100644 --- a/src/link/http/__tests__/responseIteratorNoAsyncIterator.ts +++ b/src/link/http/__tests__/responseIteratorNoAsyncIterator.ts @@ -1,11 +1,10 @@ import gql from "graphql-tag"; import { execute } from "../../core/execute"; import { HttpLink } from "../HttpLink"; -import { itAsync, subscribeAndCount } from "../../../testing"; -import type { Observable } from "zen-observable-ts"; import { TextEncoder, TextDecoder } from "util"; import { ReadableStream } from "web-streams-polyfill"; import { Readable } from "stream"; +import { ObservableStream } from "../../../testing/internal"; // As of Jest 26 there is no way to mock/unmock a module that is used indirectly // via a single test file. @@ -34,28 +33,6 @@ const sampleDeferredQuery = gql` const BOUNDARY = "gc0p4Jq0M2Yt08jU534c0p"; -function matchesResults( - resolve: () => void, - reject: (err: any) => void, - observable: Observable, - results: Array -) { - // TODO: adding a second observer to the observable will consume the - // observable. I want to test completion, but the subscribeAndCount API - // doesn’t have anything like that. - subscribeAndCount(reject, observable, (count, result) => { - // subscribeAndCount is 1-indexed for some terrible reason. - if (0 >= count || count > results.length) { - reject(new Error("Unexpected result")); - } - - expect(result).toEqual(results[count - 1]); - if (count === results.length) { - resolve(); - } - }); -} - describe("multipart responses", () => { let originalTextDecoder: any; beforeAll(() => { @@ -180,7 +157,7 @@ describe("multipart responses", () => { }, ]; - itAsync("can handle whatwg stream bodies", (resolve, reject) => { + it("can handle whatwg stream bodies", async () => { const stream = new ReadableStream({ async start(controller) { const lines = bodyCustomBoundary.split("\r\n"); @@ -207,146 +184,167 @@ describe("multipart responses", () => { }); const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); }); - itAsync( - "can handle whatwg stream bodies with arbitrary splits", - (resolve, reject) => { - const stream = new ReadableStream({ - async start(controller) { - let chunks: Array = []; - let chunkSize = 15; - for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { - chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); - } + it("can handle whatwg stream bodies with arbitrary splits", async () => { + const stream = new ReadableStream({ + async start(controller) { + let chunks: Array = []; + let chunkSize = 15; + for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { + chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); + } - try { - for (const chunk of chunks) { - controller.enqueue(chunk); - } - } finally { - controller.close(); + try { + for (const chunk of chunks) { + controller.enqueue(chunk); } - }, - }); - - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - headers: new Headers({ - "content-type": `multipart/mixed; boundary=${BOUNDARY}`, - }), - })); - - 
const link = new HttpLink({ - fetch: fetch as any, - }); - - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + } finally { + controller.close(); + } + }, + }); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); + + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - - itAsync( - "can handle node stream bodies (strings) with default boundary", - (resolve, reject) => { - const stream = Readable.from( - bodyDefaultBoundary.split("\r\n").map((line) => line + "\r\n") - ); - - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "content-type": `multipart/mixed`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); - - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + + await expect(observableStream).toComplete(); + }); + + it("can handle node stream bodies (strings) with default boundary", async () => { + const stream = Readable.from( + bodyDefaultBoundary.split("\r\n").map((line) => line + "\r\n") + ); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + // if no boundary is specified, default to - + headers: new Headers({ + "content-type": `multipart/mixed`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - - itAsync( - "can handle node stream bodies (strings) with arbitrary splits", - (resolve, reject) => { - let chunks: Array = []; - let chunkSize = 15; - for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { - chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); - } - const stream = Readable.from(chunks); - - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - headers: new Headers({ - "content-type": `multipart/mixed; boundary=${BOUNDARY}`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); - - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, results); + + await expect(observableStream).toComplete(); + }); + + it("can handle node stream bodies (strings) with arbitrary splits", async () => { + let chunks: Array = []; + let chunkSize = 15; + for (let i = 0; i < bodyCustomBoundary.length; i += chunkSize) { + chunks.push(bodyCustomBoundary.slice(i, i + chunkSize)); } - ); - - itAsync( - "can handle node stream bodies (array buffers)", - (resolve, reject) => { - const stream = Readable.from( - bodyDefaultBoundary - .split("\r\n") - .map((line) => new TextEncoder().encode(line + "\r\n")) - ); - - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "content-type": `multipart/mixed`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); - - const observable = execute(link, { query: sampleDeferredQuery }); - 
matchesResults(resolve, reject, observable, results); + const stream = Readable.from(chunks); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + headers: new Headers({ + "content-type": `multipart/mixed; boundary=${BOUNDARY}`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); - - itAsync( - "can handle node stream bodies (array buffers) with batched results", - (resolve, reject) => { - const stream = Readable.from( - bodyBatchedResults - .split("\r\n") - .map((line) => new TextEncoder().encode(line + "\r\n")) - ); - - const fetch = jest.fn(async () => ({ - status: 200, - body: stream, - // if no boundary is specified, default to - - headers: new Headers({ - "Content-Type": `multipart/mixed;boundary="graphql";deferSpec=20220824`, - }), - })); - const link = new HttpLink({ - fetch: fetch as any, - }); - - const observable = execute(link, { query: sampleDeferredQuery }); - matchesResults(resolve, reject, observable, batchedResults); + + await expect(observableStream).toComplete(); + }); + + it("can handle node stream bodies (array buffers)", async () => { + const stream = Readable.from( + bodyDefaultBoundary + .split("\r\n") + .map((line) => new TextEncoder().encode(line + "\r\n")) + ); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + // if no boundary is specified, default to - + headers: new Headers({ + "content-type": `multipart/mixed`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of results) { + await expect(observableStream).toEmitValue(result); } - ); + + await expect(observableStream).toComplete(); + }); + + it("can handle node stream bodies (array buffers) with batched results", async () => { + const stream = Readable.from( + bodyBatchedResults + .split("\r\n") + .map((line) => new TextEncoder().encode(line + "\r\n")) + ); + + const fetch = jest.fn(async () => ({ + status: 200, + body: stream, + // if no boundary is specified, default to - + headers: new Headers({ + "Content-Type": `multipart/mixed;boundary="graphql";deferSpec=20220824`, + }), + })); + const link = new HttpLink({ + fetch: fetch as any, + }); + + const observable = execute(link, { query: sampleDeferredQuery }); + const observableStream = new ObservableStream(observable); + + for (const result of batchedResults) { + await expect(observableStream).toEmitValue(result); + } + + await expect(observableStream).toComplete(); + }); }); diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx index d2db8511373..8e81130201f 100644 --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -23,7 +23,6 @@ import { MockedProvider, MockSubscriptionLink, mockSingleLink, - subscribeAndCount, MockedResponse, MockLink, } from "../../../testing"; @@ -37,8 +36,11 @@ import { expectTypeOf } from "expect-type"; import { Masked } from "../../../masking"; import { disableActEnvironment, + createRenderStream, renderHookToSnapshotStream, } from "@testing-library/react-render-stream"; +import { MutationTuple, QueryResult } from "../../types/types"; +import { invariant } from 
"../../../utilities/globals"; describe("useMutation Hook", () => { interface Todo { @@ -2174,317 +2176,405 @@ describe("useMutation Hook", () => { }); }); - itAsync( - "using onQueryUpdated callback should not prevent cache broadcast", - async (resolve, reject) => { - // Mutating this array makes the tests below much more difficult to reason - // about, so instead we reassign the numbersArray variable to remove - // elements, without mutating the previous array object. - let numbersArray: ReadonlyArray<{ id: string; value: number }> = [ - { id: "1", value: 324 }, - { id: "2", value: 729 }, - { id: "3", value: 987 }, - { id: "4", value: 344 }, - { id: "5", value: 72 }, - { id: "6", value: 899 }, - { id: "7", value: 222 }, - ]; + it("using onQueryUpdated callback should not prevent cache broadcast", async () => { + // Mutating this array makes the tests below much more difficult to reason + // about, so instead we reassign the numbersArray variable to remove + // elements, without mutating the previous array object. + let numbersArray: ReadonlyArray<{ id: string; value: number }> = [ + { id: "1", value: 324 }, + { id: "2", value: 729 }, + { id: "3", value: 987 }, + { id: "4", value: 344 }, + { id: "5", value: 72 }, + { id: "6", value: 899 }, + { id: "7", value: 222 }, + ]; - type TNumbersQuery = { - numbers: { - __typename: "NumbersResult"; + // Modifying this value means we can return a subset of our numbers array + // without needing to mutate or reassignn the original numbersArray. + let totalNumbers: number = numbersArray.length; + + type TNumbersQuery = { + numbers: { + __typename: "NumbersResult"; + id: string; + sum: number; + numbersArray: ReadonlyArray<{ id: string; - sum: number; - numbersArray: ReadonlyArray<{ - id: string; - value: number; - }>; - }; + value: number; + }>; }; + }; - function getNumbersData(): TNumbersQuery { - return { - numbers: { - __typename: "NumbersResult", - id: "numbersId", - numbersArray, - sum: numbersArray.reduce((sum, b) => sum + b.value, 0), - }, - }; - } + function getNumbersData(length: number = totalNumbers): TNumbersQuery { + const numbers = numbersArray.slice(0, length); - const link = new ApolloLink((operation) => { - return new Observable((observer) => { + return { + numbers: { + __typename: "NumbersResult", + id: "numbersId", + numbersArray: numbers, + sum: numbers.reduce((sum, b) => sum + b.value, 0), + }, + }; + } + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + setTimeout(() => { const { operationName } = operation; if (operationName === "NumbersQuery") { observer.next({ data: getNumbersData(), }); } else if (operationName === "RemoveNumberMutation") { - const last = numbersArray[numbersArray.length - 1]; - numbersArray = numbersArray.slice(0, -1); observer.next({ data: { - removeLastNumber: last, + removeLastNumber: getLastNumber(), }, }); + + totalNumbers--; } - setTimeout(() => { - observer.complete(); - }, 50); - }); + observer.complete(); + }, 50); }); + }); - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ - typePolicies: { - NumbersResult: { - fields: { - numbersArray: { merge: false }, - sum(_, { readField }) { - const numbersArray = - readField( - "numbersArray" - ); - return (numbersArray || []).reduce( - (sum, item) => sum + item.value, - 0 + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + typePolicies: { + NumbersResult: { + fields: { + numbersArray: { merge: false }, + sum(_, { readField }) { + const numbersArray = + readField( + 
"numbersArray" ); - }, + return (numbersArray || []).reduce( + (sum, item) => sum + item.value, + 0 + ); }, }, }, - }), - }); + }, + }), + }); - const NumbersQuery: TypedDocumentNode = gql` - query NumbersQuery { - numbers { + const NumbersQuery: TypedDocumentNode = gql` + query NumbersQuery { + numbers { + id + sum + numbersArray { id - sum - numbersArray { - id - value - } + value } } - `; + } + `; - const RemoveNumberMutation = gql` - mutation RemoveNumberMutation { - removeLastNumber { - id - } + const RemoveNumberMutation = gql` + mutation RemoveNumberMutation { + removeLastNumber { + id } - `; + } + `; - const { result } = renderHook( - () => ({ - query: useQuery(NumbersQuery, { - notifyOnNetworkStatusChange: true, - }), + const renderStream = createRenderStream({ + initialSnapshot: { + useQueryResult: null as QueryResult | null, + useMutationResult: null as MutationTuple | null, + }, + }); - mutation: useMutation(RemoveNumberMutation, { - update(cache) { - const oldData = cache.readQuery({ query: NumbersQuery }); - cache.writeQuery({ - query: NumbersQuery, - data: - oldData ? - { - ...oldData, - numbers: { - ...oldData.numbers, - numbersArray: oldData.numbers.numbersArray.slice( - 0, - -1 - ), - }, - } - : { - numbers: { - __typename: "NumbersResult", - id: "numbersId", - sum: 0, - numbersArray: [], - }, + function App() { + renderStream.mergeSnapshot({ + useQueryResult: useQuery(NumbersQuery, { + notifyOnNetworkStatusChange: true, + }), + useMutationResult: useMutation(RemoveNumberMutation, { + update(cache) { + const oldData = cache.readQuery({ query: NumbersQuery }); + cache.writeQuery({ + query: NumbersQuery, + data: + oldData ? + { + ...oldData, + numbers: { + ...oldData.numbers, + numbersArray: oldData.numbers.numbersArray.slice(0, -1), }, - }); - }, - }), + } + : { + numbers: { + __typename: "NumbersResult", + id: "numbersId", + sum: 0, + numbersArray: [], + }, + }, + }); + }, }), - { - wrapper: ({ children }) => ( - {children} - ), - } - ); + }); - const obsQueryMap = client.getObservableQueries(); - expect(obsQueryMap.size).toBe(1); - const observedResults: Array<{ data: TNumbersQuery }> = []; - subscribeAndCount( - reject, - obsQueryMap.values().next().value, - (count, result: { data: TNumbersQuery }) => { - observedResults.push(result); - expect(observedResults.length).toBe(count); - const data = getNumbersData(); - - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - } else if (count === 2) { - expect(data.numbers.numbersArray.length).toBe(7); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); - } else if (count === 3) { - expect(data.numbers.numbersArray.length).toBe(6); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); - } else if (count === 4) { - expect(data.numbers.numbersArray.length).toBe(5); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); + return null; + } - // This line is the only way to finish this test successfully. - setTimeout(resolve, 50); - } else { - // If we did not return false from the final onQueryUpdated function, - // we would receive an additional result here. 
- reject( - `too many renders (${count}); final result: ${JSON.stringify( - result - )}` - ); - } - } - ); + using _disabledAct = disableActEnvironment(); + await renderStream.render(, { + wrapper: ({ children }) => ( + {children} + ), + }); + + async function getNextSnapshot() { + const { snapshot } = await renderStream.takeRender(); + + invariant(snapshot.useQueryResult); + invariant(snapshot.useMutationResult); + + return { + useQueryResult: snapshot.useQueryResult, + useMutationResult: snapshot.useMutationResult, + }; + } + + function getLastNumber() { + const numbers = numbersArray.slice(0, totalNumbers); + + return numbers[numbers.length - 1]; + } + + expect(getLastNumber()).toEqual({ id: "7", value: 222 }); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + + expect(useQueryResult.loading).toBe(true); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.loading); + expect(useQueryResult.data).toBeUndefined(); + + expect(mutationResult.loading).toBe(false); + expect(mutationResult.called).toBe(false); + expect(mutationResult.data).toBeUndefined(); + } + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(7); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); - expect(observedResults).toEqual([]); + expect(mutationResult.loading).toBe(false); + expect(mutationResult.called).toBe(false); + expect(mutationResult.data).toBeUndefined(); + } + + const [mutate] = + renderStream.getCurrentRender().snapshot.useMutationResult!; + + let promise = mutate(); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(7); - expect(result.current.query.loading).toBe(true); - expect(result.current.query.networkStatus).toBe(NetworkStatus.loading); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(false); - await waitFor( - () => { - expect(result.current.query.loading).toBe(false); + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); + + expect(mutationResult.loading).toBe(true); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toBeUndefined(); + } + + // Not passing an onQueryUpdated callback should allow cache + // broadcasts to propagate as normal. The point of this test is to + // demonstrate that *adding* onQueryUpdated should not prevent cache + // broadcasts (see below for where we test that). 
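+ // (The snapshots that follow show the useQuery result updating to the
+ // six-element array via the normal broadcast. The later mutate() calls add an
+ // onQueryUpdated callback: first one that merely inspects the diff, then one
+ // that returns false to suppress the broadcast.)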
+ await expect(promise).resolves.toEqual({ + data: { + removeLastNumber: { + id: "7", }, - { interval: 1 } - ); + }, + }); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(false); + expect(getLastNumber()).toEqual({ id: "6", value: 899 }); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: "7", - value: 222, - }); + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); - const [mutate] = result.current.mutation; - await act(async () => { - expect( - await mutate() - // Not passing an onQueryUpdated callback should allow cache - // broadcasts to propagate as normal. The point of this test is to - // demonstrate that *adding* onQueryUpdated should not prevent cache - // broadcasts (see below for where we test that). - ).toEqual({ - data: { - removeLastNumber: { - id: "7", - }, - }, - }); - }); + expect(data.numbers.numbersArray).toHaveLength(6); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: "6", - value: 899, - }); + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); + expect(mutationResult.loading).toBe(true); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toBeUndefined(); + } - await act(async () => { - expect( - await mutate({ - // Adding this onQueryUpdated callback, which merely examines the - // updated query and its DiffResult, should not change the broadcast - // behavior of the ObservableQuery. - onQueryUpdated(oq, diff) { - expect(oq.queryName).toBe("NumbersQuery"); - expect(diff.result.numbers.numbersArray.length).toBe(5); - expect(diff.result.numbers.sum).toBe(2456); - }, - }) - ).toEqual({ - data: { - removeLastNumber: { - id: "6", - }, - }, - }); - }); + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: "5", - value: 72, - }); + expect(data.numbers.numbersArray).toHaveLength(6); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); - await act(async () => { - expect( - await mutate({ - onQueryUpdated(oq, diff) { - expect(oq.queryName).toBe("NumbersQuery"); - expect(diff.result.numbers.numbersArray.length).toBe(4); - expect(diff.result.numbers.sum).toBe(2384); - // Returning false from onQueryUpdated prevents the cache broadcast. 
- return false; - }, - }) - ).toEqual({ - data: { - removeLastNumber: { - id: "5", - }, - }, - }); - }); + expect(mutationResult.loading).toBe(false); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toEqual({ removeLastNumber: { id: "7" } }); + } - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: "4", - value: 344, - }); + promise = mutate({ + // Adding this onQueryUpdated callback, which merely examines the + // updated query and its DiffResult, should not change the broadcast + // behavior of the ObservableQuery. + onQueryUpdated(oq, diff) { + expect(oq.queryName).toBe("NumbersQuery"); + expect(diff.result.numbers.numbersArray.length).toBe(5); + expect(diff.result.numbers.sum).toBe(2456); + }, + }); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); + expect(data.numbers.numbersArray).toHaveLength(6); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); + + expect(mutationResult.loading).toBe(true); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toBeUndefined(); } - ); + + await expect(promise).resolves.toEqual({ + data: { + removeLastNumber: { + id: "6", + }, + }, + }); + + expect(getLastNumber()).toEqual({ id: "5", value: 72 }); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(5); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); + + expect(mutationResult.loading).toBe(true); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toBeUndefined(); + } + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(5); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); + + expect(mutationResult.loading).toBe(false); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toEqual({ removeLastNumber: { id: "6" } }); + } + + promise = mutate({ + onQueryUpdated(oq, diff) { + expect(oq.queryName).toBe("NumbersQuery"); + expect(diff.result.numbers.numbersArray.length).toBe(4); + expect(diff.result.numbers.sum).toBe(2384); + // Returning false from onQueryUpdated prevents the cache broadcast. 
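+ // The watched NumbersQuery therefore keeps its previous five-element result
+ // even though the cache update itself still runs, which the final snapshot
+ // below asserts via getNumbersData(5).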
+ return false; + }, + }); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(5); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + expect(useQueryResult.data).toEqual(data); + + expect(mutationResult.loading).toBe(true); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toBeUndefined(); + } + + await expect(promise).resolves.toEqual({ + data: { + removeLastNumber: { + id: "5", + }, + }, + }); + + expect(getLastNumber()).toEqual({ id: "4", value: 344 }); + + { + const { useQueryResult, useMutationResult } = await getNextSnapshot(); + const [, mutationResult] = useMutationResult; + const data = getNumbersData(); + + expect(data.numbers.numbersArray).toHaveLength(4); + + expect(useQueryResult.loading).toBe(false); + expect(useQueryResult.networkStatus).toBe(NetworkStatus.ready); + // This mutation did not braodcast results, so we expect our numbers to + // equal the previous set. + expect(useQueryResult.data).toEqual(getNumbersData(5)); + + expect(mutationResult.loading).toBe(false); + expect(mutationResult.called).toBe(true); + expect(mutationResult.data).toEqual({ removeLastNumber: { id: "5" } }); + } + + await expect(renderStream).not.toRerender(); + }); it("refetchQueries should work with BatchHttpLink", async () => { const MUTATION_1 = gql` diff --git a/src/testing/internal/ObservableStream.ts b/src/testing/internal/ObservableStream.ts index e7d8bd3e757..63f550827c6 100644 --- a/src/testing/internal/ObservableStream.ts +++ b/src/testing/internal/ObservableStream.ts @@ -1,7 +1,7 @@ import type { Observable } from "../../utilities/index.js"; import { ReadableStream } from "node:stream/web"; -interface TakeOptions { +export interface TakeOptions { timeout?: number; } type ObservableEvent = diff --git a/src/testing/matchers/index.d.ts b/src/testing/matchers/index.d.ts index 7dc37ea8bb7..3bd1f606dae 100644 --- a/src/testing/matchers/index.d.ts +++ b/src/testing/matchers/index.d.ts @@ -4,8 +4,9 @@ import type { OperationVariables, } from "../../core/index.js"; import type { QueryRef } from "../../react/index.js"; -import { NextRenderOptions } from "../internal/index.js"; +import { NextRenderOptions, ObservableStream } from "../internal/index.js"; import { RenderStreamMatchers } from "@testing-library/react-render-stream/expect"; +import { TakeOptions } from "../internal/ObservableStream.js"; interface ApolloCustomMatchers { /** @@ -34,6 +35,38 @@ interface ApolloCustomMatchers { toBeGarbageCollected: T extends WeakRef ? () => Promise : { error: "matcher needs to be called on a WeakRef instance" }; + + toComplete: T extends ObservableStream ? + (options?: TakeOptions) => Promise + : { error: "matcher needs to be called on an ObservableStream instance" }; + + toEmitAnything: T extends ObservableStream ? + (options?: TakeOptions) => Promise + : { error: "matcher needs to be called on an ObservableStream instance" }; + + toEmitError: T extends ObservableStream ? + (error?: any, options?: TakeOptions) => Promise + : { error: "matcher needs to be called on an ObservableStream instance" }; + + /** + * Used to determine if the observable stream emitted a `next` event. Use + * `toEmitValue` to check if the `next` event emitted a specific value. + */ + toEmitNext: T extends ObservableStream ? 
+    (options?: TakeOptions) => Promise<void>
+  : { error: "matcher needs to be called on an ObservableStream instance" };
+
+  toEmitValue: T extends ObservableStream<any> ?
+    (value: any, options?: TakeOptions) => Promise<void>
+  : { error: "matcher needs to be called on an ObservableStream instance" };
+
+  toEmitValueStrict: T extends ObservableStream<any> ?
+    (value: any, options?: TakeOptions) => Promise<void>
+  : { error: "matcher needs to be called on an ObservableStream instance" };
+
+  toEmitMatchedValue: T extends ObservableStream<any> ?
+    (value: any, options?: TakeOptions) => Promise<void>
+  : { error: "matcher needs to be called on an ObservableStream instance" };
 }
 
 declare global {
diff --git a/src/testing/matchers/index.ts b/src/testing/matchers/index.ts
index 1ba62d09215..149a20c1cf4 100644
--- a/src/testing/matchers/index.ts
+++ b/src/testing/matchers/index.ts
@@ -3,8 +3,22 @@ import { toMatchDocument } from "./toMatchDocument.js";
 import { toHaveSuspenseCacheEntryUsing } from "./toHaveSuspenseCacheEntryUsing.js";
 import { toBeGarbageCollected } from "./toBeGarbageCollected.js";
 import { toBeDisposed } from "./toBeDisposed.js";
+import { toComplete } from "./toComplete.js";
+import { toEmitAnything } from "./toEmitAnything.js";
+import { toEmitError } from "./toEmitError.js";
+import { toEmitMatchedValue } from "./toEmitMatchedValue.js";
+import { toEmitNext } from "./toEmitNext.js";
+import { toEmitValue } from "./toEmitValue.js";
+import { toEmitValueStrict } from "./toEmitValueStrict.js";
 
 expect.extend({
+  toComplete,
+  toEmitAnything,
+  toEmitError,
+  toEmitMatchedValue,
+  toEmitNext,
+  toEmitValue,
+  toEmitValueStrict,
   toBeDisposed,
   toHaveSuspenseCacheEntryUsing,
   toMatchDocument,
diff --git a/src/testing/matchers/toComplete.ts b/src/testing/matchers/toComplete.ts
new file mode 100644
index 00000000000..e017fd9ff8f
--- /dev/null
+++ b/src/testing/matchers/toComplete.ts
@@ -0,0 +1,33 @@
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toComplete: MatcherFunction<[options?: TakeOptions]> =
+  async function (actual, options) {
+    const stream = actual as ObservableStream<any>;
+    const hint = this.utils.matcherHint("toComplete", "stream", "");
+
+    try {
+      await stream.takeComplete(options);
+
+      return {
+        pass: true,
+        message: () => {
+          return hint + "\n\nExpected stream not to complete but it did.";
+        },
+      };
+    } catch (error) {
+      if (
+        error instanceof Error &&
+        error.message === "Timeout waiting for next event"
+      ) {
+        return {
+          pass: false,
+          message: () =>
+            hint + "\n\nExpected stream to complete but it did not.",
+        };
+      } else {
+        throw error;
+      }
+    }
+  };
diff --git a/src/testing/matchers/toEmitAnything.ts b/src/testing/matchers/toEmitAnything.ts
new file mode 100644
index 00000000000..0096caf455d
--- /dev/null
+++ b/src/testing/matchers/toEmitAnything.ts
@@ -0,0 +1,38 @@
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitAnything: MatcherFunction<[options?: TakeOptions]> =
+  async function (actual, options) {
+    const stream = actual as ObservableStream<any>;
+    const hint = this.utils.matcherHint("toEmitAnything", "stream", "");
+
+    try {
+      const value = await stream.take(options);
+
+      return {
+        pass: true,
+        message: () => {
+          return (
+            hint +
+            "\n\nExpected stream not to emit anything but it did." +
+            "\n\nReceived:\n" +
+            this.utils.printReceived(value)
+          );
+        },
+      };
+    } catch (error) {
+      if (
+        error instanceof Error &&
+        error.message === "Timeout waiting for next event"
+      ) {
+        return {
+          pass: false,
+          message: () =>
+            hint + "\n\nExpected stream to emit an event but it did not.",
+        };
+      } else {
+        throw error;
+      }
+    }
+  };
diff --git a/src/testing/matchers/toEmitError.ts b/src/testing/matchers/toEmitError.ts
new file mode 100644
index 00000000000..75e93aa56f2
--- /dev/null
+++ b/src/testing/matchers/toEmitError.ts
@@ -0,0 +1,62 @@
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitError: MatcherFunction<
+  [value?: any, options?: TakeOptions]
+> = async function (actual, expected, options) {
+  const stream = actual as ObservableStream<any>;
+  const hint = this.utils.matcherHint(
+    this.isNot ? ".not.toEmitError" : "toEmitError",
+    "stream",
+    "error"
+  );
+
+  try {
+    const error = await stream.takeError(options);
+    const pass =
+      expected === undefined ? true : (
+        this.equals(expected, error, this.customTesters)
+      );
+
+    return {
+      pass,
+      message: () => {
+        if (pass) {
+          return (
+            hint +
+            "\n\nExpected stream not to emit error but it did." +
+            `\n\nReceived:` +
+            `\n` +
+            this.utils.printReceived(error)
+          );
+        }
+
+        return (
+          hint +
+          "\n\n" +
+          this.utils.printDiffOrStringify(
+            expected,
+            error,
+            "Expected",
+            "Received",
+            true
+          )
+        );
+      },
+    };
+  } catch (error) {
+    if (
+      error instanceof Error &&
+      error.message === "Timeout waiting for next event"
+    ) {
+      return {
+        pass: false,
+        message: () =>
+          hint + "\n\nExpected stream to emit an error but it did not.",
+      };
+    } else {
+      throw error;
+    }
+  }
+};
diff --git a/src/testing/matchers/toEmitMatchedValue.ts b/src/testing/matchers/toEmitMatchedValue.ts
new file mode 100644
index 00000000000..70f636926ad
--- /dev/null
+++ b/src/testing/matchers/toEmitMatchedValue.ts
@@ -0,0 +1,67 @@
+import {
+  iterableEquality,
+  subsetEquality,
+  getObjectSubset,
+} from "@jest/expect-utils";
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitMatchedValue: MatcherFunction<
+  [value: any, options?: TakeOptions]
+> = async function (actual, expected, options) {
+  const stream = actual as ObservableStream<any>;
+  const hint = this.utils.matcherHint(
+    "toEmitMatchedValue",
+    "stream",
+    "expected",
+    { isNot: this.isNot, promise: this.promise }
+  );
+
+  try {
+    const value = await stream.takeNext(options);
+    const pass = this.equals(
+      value,
+      expected,
+      // https://github.com/jestjs/jest/blob/22029ba06b69716699254bb9397f2b3bc7b3cf3b/packages/expect/src/matchers.ts#L923-L927
+      [...this.customTesters, iterableEquality, subsetEquality]
+    );
+
+    return {
+      pass,
+      message: () => {
+        if (pass) {
+          return (
+            hint +
+            "\n\nExpected stream not to emit a value equal to expected but it did."
+          );
+        }
+
+        return (
+          hint +
+          "\n\n" +
+          this.utils.printDiffOrStringify(
+            expected,
+            getObjectSubset(value, expected, this.customTesters),
+            "Expected",
+            "Received",
+            true
+          )
+        );
+      },
+    };
+  } catch (error) {
+    if (
+      error instanceof Error &&
+      error.message === "Timeout waiting for next event"
+    ) {
+      return {
+        pass: false,
+        message: () =>
+          hint + "\n\nExpected stream to emit a value but it did not.",
+      };
+    } else {
+      throw error;
+    }
+  }
+};
diff --git a/src/testing/matchers/toEmitNext.ts b/src/testing/matchers/toEmitNext.ts
new file mode 100644
index 00000000000..884d8a1b30e
--- /dev/null
+++ b/src/testing/matchers/toEmitNext.ts
@@ -0,0 +1,37 @@
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitNext: MatcherFunction<[options?: TakeOptions]> =
+  async function (actual, options) {
+    const stream = actual as ObservableStream<any>;
+    const hint = this.utils.matcherHint(
+      this.isNot ? ".not.toEmitNext" : "toEmitNext",
+      "stream",
+      "expected"
+    );
+
+    try {
+      await stream.takeNext(options);
+
+      return {
+        pass: true,
+        message: () => {
+          return hint + "\n\nExpected stream not to emit a value but it did.";
+        },
+      };
+    } catch (error) {
+      if (
+        error instanceof Error &&
+        error.message === "Timeout waiting for next event"
+      ) {
+        return {
+          pass: false,
+          message: () =>
+            hint + "\n\nExpected stream to emit a value but it did not.",
+        };
+      } else {
+        throw error;
+      }
+    }
+  };
diff --git a/src/testing/matchers/toEmitValue.ts b/src/testing/matchers/toEmitValue.ts
new file mode 100644
index 00000000000..8fad71ff92d
--- /dev/null
+++ b/src/testing/matchers/toEmitValue.ts
@@ -0,0 +1,59 @@
+import { iterableEquality } from "@jest/expect-utils";
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitValue: MatcherFunction<[value: any, options?: TakeOptions]> =
+  async function (actual, expected, options) {
+    const stream = actual as ObservableStream<any>;
+    const hint = this.utils.matcherHint(
+      this.isNot ? ".not.toEmitValue" : "toEmitValue",
+      "stream",
+      "expected"
+    );
+
+    try {
+      const value = await stream.takeNext(options);
+      const pass = this.equals(value, expected, [
+        ...this.customTesters,
+        iterableEquality,
+      ]);
+
+      return {
+        pass,
+        message: () => {
+          if (pass) {
+            return (
+              hint +
+              "\n\nExpected stream not to emit a value equal to expected but it did."
+            );
+          }
+
+          return (
+            hint +
+            "\n\n" +
+            this.utils.printDiffOrStringify(
+              expected,
+              value,
+              "Expected",
+              "Received",
+              true
+            )
+          );
+        },
+      };
+    } catch (error) {
+      if (
+        error instanceof Error &&
+        error.message === "Timeout waiting for next event"
+      ) {
+        return {
+          pass: false,
+          message: () =>
+            hint + "\n\nExpected stream to emit a value but it did not.",
+        };
+      } else {
+        throw error;
+      }
+    }
+  };
diff --git a/src/testing/matchers/toEmitValueStrict.ts b/src/testing/matchers/toEmitValueStrict.ts
new file mode 100644
index 00000000000..505a4880e29
--- /dev/null
+++ b/src/testing/matchers/toEmitValueStrict.ts
@@ -0,0 +1,74 @@
+import {
+  arrayBufferEquality,
+  iterableEquality,
+  sparseArrayEquality,
+  typeEquality,
+} from "@jest/expect-utils";
+import type { MatcherFunction } from "expect";
+import type { ObservableStream } from "../internal/index.js";
+import type { TakeOptions } from "../internal/ObservableStream.js";
+
+export const toEmitValueStrict: MatcherFunction<
+  [value: any, options?: TakeOptions]
+> = async function (actual, expected, options) {
+  const stream = actual as ObservableStream<any>;
+  const hint = this.utils.matcherHint(
+    this.isNot ? ".not.toEmitValueStrict" : "toEmitValueStrict",
+    "stream",
+    "expected"
+  );
+
+  try {
+    const value = await stream.takeNext(options);
+    const pass = this.equals(
+      value,
+      expected,
+      // https://github.com/jestjs/jest/blob/22029ba06b69716699254bb9397f2b3bc7b3cf3b/packages/expect/src/matchers.ts#L62-L67
+      [
+        ...this.customTesters,
+        iterableEquality,
+        typeEquality,
+        sparseArrayEquality,
+        arrayBufferEquality,
+      ],
+      true
+    );
+
+    return {
+      pass,
+      message: () => {
+        if (pass) {
+          return (
+            hint +
+            "\n\nExpected stream not to emit a value equal to expected but it did."
+          );
+        }
+
+        return (
+          hint +
+          "\n\n" +
+          this.utils.printDiffOrStringify(
+            expected,
+            value,
+            "Expected",
+            "Received",
+            true
+          )
+        );
+      },
+    };
+  } catch (error) {
+    if (
+      error instanceof Error &&
+      error.message === "Timeout waiting for next event"
+    ) {
+      return {
+        pass: false,
+        message: () =>
+          hint + "\n\nExpected stream to emit a value but it did not.",
+      };
+    } else {
+      throw error;
+    }
+  }
+};
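Reviewer note, not part of the patch: a minimal usage sketch of the new ObservableStream matchers. The observable, the expected values, the timeout, and the relative import paths below are illustrative assumptions, not code taken from this diff.

import { Observable } from "../../utilities/index.js";
import { ObservableStream } from "../internal/index.js";

it("emits a single value and then completes", async () => {
  // A hypothetical observable that emits once and then completes.
  const observable = new Observable<number>((observer) => {
    observer.next(1);
    observer.complete();
  });

  const stream = new ObservableStream(observable);

  // Deep-equality check against the next `next` event.
  await expect(stream).toEmitValue(1);
  // The stream should then signal completion...
  await expect(stream).toComplete();
  // ...and emit nothing further before the optional timeout elapses.
  await expect(stream).not.toEmitAnything({ timeout: 50 });
});

Because each matcher converts ObservableStream's "Timeout waiting for next event" error into a failed expectation instead of rethrowing it, the negated forms such as .not.toEmitAnything remain usable in tests.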