diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e4197c57b3..d7e7d473256 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,53 @@ +## Apollo Client 3.2.0 + +## Bug Fixes + +- Use `options.nextFetchPolicy` internally to restore original `FetchPolicy` after polling with `fetchPolicy: "network-only"`, so that polling does not interfere with normal query watching.
+ [@benjamn](https://github.com/benjamn) in [#6893](https://github.com/apollographql/apollo-client/pull/6893) + +- Initialize `ObservableQuery` in `updateObservableQuery` even if `skip` is `true`.
+ [@mu29](https://github.com/mu29) in [#6999](https://github.com/apollographql/apollo-client/pull/6999) + +- Prevent full reobservation of queries affected by optimistic mutation updates, while still delivering results from the cache.
+ [@benjamn](https://github.com/benjamn) in [#6854](https://github.com/apollographql/apollo-client/pull/6854) + +## Improvements + +- In TypeScript, all APIs that take `DocumentNode` parameters now may alternatively take `TypedDocumentNode`. This type has the same JavaScript representation but allows the APIs to infer the data and variable types instead of requiring you to specify types explicitly at the call site.
+ [@dotansimha](https://github.com/dotansimha) in [#6720](https://github.com/apollographql/apollo-client/pull/6720) + +- Bring back an improved form of heuristic fragment matching, by allowing `possibleTypes` to specify subtype regular expression strings, which count as matches if the written result object has all the fields expected for the fragment.
+ [@benjamn](https://github.com/benjamn) in [#6901](https://github.com/apollographql/apollo-client/pull/6901) + +- Allow `options.nextFetchPolicy` to be a function that takes the current `FetchPolicy` and returns a new (or the same) `FetchPolicy`, making `nextFetchPolicy` more suitable for global use in `defaultOptions.watchQuery`.
+ [@benjamn](https://github.com/benjamn) in [#6893](https://github.com/apollographql/apollo-client/pull/6893) + +- Implement `useReactiveVar` hook for consuming reactive variables in React components.
+ [@benjamn](https://github.com/benjamn) in [#6867](https://github.com/apollographql/apollo-client/pull/6867) + +- Move `apollo-link-persisted-queries` implementation to `@apollo/client/link/persisted-queries`. Try running our [automated imports transform](https://github.com/apollographql/apollo-client/tree/main/codemods/ac2-to-ac3) to handle this conversion, if you're using `apollo-link-persisted-queries`.
+ [@hwillson](https://github.com/hwillson) in [#6837](https://github.com/apollographql/apollo-client/pull/6837) + +- Disable feud-stopping logic after any `cache.evict` or `cache.modify` operation.
+ [@benjamn](https://github.com/benjamn) in + [#6817](https://github.com/apollographql/apollo-client/pull/6817) and + [#6898](https://github.com/apollographql/apollo-client/pull/6898) + +- Throw if `writeFragment` cannot identify `options.data` when no `options.id` provided.
+ [@jcreighton](https://github.com/jcreighton) in [#6859](https://github.com/apollographql/apollo-client/pull/6859) + +- Provide `options.storage` object to `cache.modify` functions, as provided to `read` and `merge` functions.
+ [@benjamn](https://github.com/benjamn) in [#6991](https://github.com/apollographql/apollo-client/pull/6991) + +- Allow `cache.modify` functions to return `details.INVALIDATE` (similar to `details.DELETE`) to invalidate the current field, causing affected queries to rerun, even if the field's value is unchanged.
+ [@benjamn](https://github.com/benjamn) in [#6991](https://github.com/apollographql/apollo-client/pull/6991) + +- Support non-default `ErrorPolicy` values (that is, `"ignore"` and `"all"`, in addition to the default value `"none"`) for mutations and subscriptions, like we do for queries.
+ [@benjamn](https://github.com/benjamn) in [#7003](https://github.com/apollographql/apollo-client/pull/7003) + +- Remove invariant forbidding a `FetchPolicy` of `cache-only` in `ObservableQuery#refetch`.
+ [@benjamn](https://github.com/benjamn) in [ccb0a79a](https://github.com/apollographql/apollo-client/pull/6774/commits/ccb0a79a588721f08bf87a131c31bf37fa3238e5), fixing [#6702](https://github.com/apollographql/apollo-client/issues/6702) + ## Apollo Client 3.1.5 ## Bug Fixes diff --git a/codemods/ac2-to-ac3/examples/link-packages.js b/codemods/ac2-to-ac3/examples/link-packages.js index 5ad2211c91a..c8bd3f41ba8 100644 --- a/codemods/ac2-to-ac3/examples/link-packages.js +++ b/codemods/ac2-to-ac3/examples/link-packages.js @@ -4,6 +4,10 @@ import { BatchLink } from 'apollo-link-batch'; import { BatchHttpLink } from 'apollo-link-batch-http'; import { setContext } from 'apollo-link-context'; import { ErrorLink } from 'apollo-link-error'; +import { + VERSION, + createPersistedQueryLink, +} from 'apollo-link-persisted-queries'; import { RetryLink } from 'apollo-link-retry'; import { WebSocketLink } from 'apollo-link-ws'; // This package was unusual for having a default export. diff --git a/codemods/ac2-to-ac3/imports.js b/codemods/ac2-to-ac3/imports.js index 47435119534..af4fe7ea2a8 100644 --- a/codemods/ac2-to-ac3/imports.js +++ b/codemods/ac2-to-ac3/imports.js @@ -34,6 +34,7 @@ export default function transformer(file, api) { 'batch-http', 'context', 'error', + 'persisted-queries', 'retry', 'schema', 'ws', diff --git a/config/entryPoints.js b/config/entryPoints.js index 4c1a5cf1396..cb016135b90 100644 --- a/config/entryPoints.js +++ b/config/entryPoints.js @@ -9,6 +9,7 @@ const entryPoints = [ { dirs: ['link', 'core'] }, { dirs: ['link', 'error'] }, { dirs: ['link', 'http'] }, + { dirs: ['link', 'persisted-queries'] }, { dirs: ['link', 'retry'] }, { dirs: ['link', 'schema'] }, { dirs: ['link', 'utils'] }, diff --git a/docs/gatsby-config.js b/docs/gatsby-config.js index 799a210f664..4932aa1e533 100644 --- a/docs/gatsby-config.js +++ b/docs/gatsby-config.js @@ -102,7 +102,8 @@ module.exports = { 'api/link/apollo-link-rest', 'api/link/apollo-link-retry', 
'api/link/apollo-link-schema', - 'api/link/apollo-link-ws' + 'api/link/apollo-link-ws', + 'api/link/persisted-queries' ], }, }, diff --git a/docs/source/api/cache/InMemoryCache.mdx b/docs/source/api/cache/InMemoryCache.mdx index 57f69fe9652..66f77a60150 100644 --- a/docs/source/api/cache/InMemoryCache.mdx +++ b/docs/source/api/cache/InMemoryCache.mdx @@ -207,6 +207,7 @@ The first parameter of a modifier function is the current value of the field bei | `canRead` | `CanReadFunction` | Returns `true` for non-normalized `StoreObjects` and non-dangling `Reference`s, indicating that `readField(name, objOrRef)` has a chance of working. Useful for filtering out dangling references from lists. | | `isReference` | `boolean` | Utility to check if an object is a `{ __ref }` object. | | `DELETE` | `any` | Sentinel object that can be returned from a modifier function to delete the field being modified. | +| `INVALIDATE` | `any` | Sentinel object that can be returned from a modifier function to invalidate the field, causing affected queries to rerun, without changing or deleting the field value. | `Modifier` functions should return the value that is to be written into the cache for the field being modified, or a `DELETE` sentinel to remove the field. diff --git a/docs/source/api/link/persisted-queries.md b/docs/source/api/link/persisted-queries.md new file mode 100644 index 00000000000..4ed8e3f4266 --- /dev/null +++ b/docs/source/api/link/persisted-queries.md @@ -0,0 +1,164 @@ +--- +title: Persisted Queries Link +description: Replace full queries with generated ID's to reduce bandwidth. +--- + +## Problem to solve + +Unlike REST APIs that use a fixed URL to load data, GraphQL provides a rich query language that can be used to express the shape of application data requirements. This is a marvelous advancement in technology, but it comes at a cost: GraphQL query strings are often much longer than REST URLS — in some cases by many kilobytes. 
+ +In practice we've seen GraphQL query sizes ranging well above 10 KB *just for the query text*. This is significant overhead when compared with a simple URL of 50-100 characters. When paired with the fact that the uplink speed from the client is typically the most bandwidth-constrained part of the chain, large queries can become bottlenecks for client performance. + +Automatic Persisted Queries solves this problem by sending a generated ID instead of the query text as the request. + +For more information about this solution, read [this article announcing Automatic Persisted Queries](https://www.apollographql.com/blog/improve-graphql-performance-with-automatic-persisted-queries-c31d27b8e6ea/). + +## How it works + +1. When the client makes a query, it will optimistically send a short (64-character) cryptographic hash instead of the full query text. +2. If the backend recognizes the hash, it will retrieve the full text of the query and execute it. +3. If the backend doesn't recognize the hash, it will ask the client to send the hash and the query text so it can store them mapped together for future lookups. During this request, the backend will also fulfill the data request. + +This library is a client implementation for use with Apollo Client by using a custom Apollo Link. + +## Installation + +This link is included in the `@apollo/client` package: + +`npm install @apollo/client` + +If you do not already have a SHA-256 based hashing function available in your application, you will need to install one separately. For example: + +`npm install crypto-hash` + +This link doesn't include a SHA-256 hash function by default, to avoid forcing one as a dependency. Developers should pick the most appropriate SHA-256 function (sync or async) for their needs/environment. + +## Usage + +The persisted query link requires using the `HttpLink`. The easiest way to use them together is to `concat` them into a single link. 
+ +```js +import { HttpLink, InMemoryCache, ApolloClient } from "@apollo/client"; +import { createPersistedQueryLink } from "@apollo/client/link/persisted-queries"; +import { sha256 } from 'crypto-hash'; + +const httpLink = new HttpLink({ uri: "/graphql" }); +const persistedQueriesLink = createPersistedQueryLink({ sha256 }); +const client = new ApolloClient({ + cache: new InMemoryCache(), + link: persistedQueriesLink.concat(httpLink), +}); +``` + +That's it! Now your client will start sending query signatures instead of the full text, resulting in improved network performance! + +#### Options + +The `createPersistedQueryLink` function takes a configuration object: + +- `sha256`: a SHA-256 hashing function. Can be sync or async. Providing a SHA-256 hashing function is required, unless you're defining a fully custom hashing approach via `generateHash`. +- `generateHash`: an optional function that takes the query document and returns the hash. If provided, this custom function will override the default hashing approach that uses the supplied `sha256` function. If not provided, the persisted queries link will use a fallback hashing approach leveraging the `sha256` function. +- `useGETForHashedQueries`: set to `true` to use the HTTP `GET` method when sending the hashed version of queries (but not for mutations). `GET` requests are not compatible with `@apollo/client/link/batch-http`. +> If you want to use `GET` for non-mutation queries whether or not they are hashed, pass `useGETForQueries: true` option to `HttpLink` instead. If you want to use `GET` for all requests, pass `fetchOptions: {method: 'GET'}` to `HttpLink`. +- `disable`: a function which takes an `ErrorResponse` (see below) and returns a boolean to disable any future persisted queries for that session. This defaults to disabling on `PersistedQueryNotSupported` or a 400 or 500 http error. 
+ +**ErrorResponse** + +The argument that the optional `disable` function is given is an object with the following keys: + +- `operation`: The Operation that encountered an error (contains `query`, `variables`, `operationName`, and `context`). +- `response`: The Execution of the response (contains `data` and `errors` as well `extensions` if sent from the server). +- `graphQLErrors`: An array of errors from the GraphQL endpoint. +- `networkError`: Any error during the link execution or server response. + +*Note*: `networkError` is the value from the downlink's `error` callback. In most cases, `graphQLErrors` is the `errors` field of the result from the last `next` call. A `networkError` can contain additional fields, such as a GraphQL object in the case of a failing HTTP status code from `@apollo/link/http`. In this situation, `graphQLErrors` is an alias for `networkError.result.errors` if the property exists. + +## Apollo Studio + +Apollo Studio supports receiving and fulfilling Automatic Persisted Queries. Simply adding this link into your client app will improve your network response times when using Apollo Studio. + +### Protocol + +Automatic Persisted Queries are made up of three parts: the query signature, error responses, and the negotiation protocol. + +**Query Signature** + +The query signature for Automatic Persisted Queries is sent through the `extensions` field of a request from the client. This is a transport independent way to send extra information along with the operation. + +```js +{ + operationName: 'MyQuery', + variables: null, + extensions: { + persistedQuery: { + version: 1, + sha256Hash: hashOfQuery + } + } +} +``` + +When sending an Automatic Persisted Query, the client omits the `query` field normally present, and instead sends an extension field with a `persistedQuery` object as shown above. The hash algorithm defaults to a `sha256` hash of the query string. 
+ +If the client needs to register the hash, the query signature will be the same but include the full query text like so: + +```js +{ + operationName: 'MyQuery', + variables: null, + query: `query MyQuery { id }`, + extensions: { + persistedQuery: { + version: 1, + sha256Hash: hashOfQuery + } + } +} +``` + +This should only happen once across all clients when a new query is introduced into your application. + +**Error Responses** + +When the initial query signature is received by a backend, if it is unable to find the hash previously stored, it will send back the following response signature: + +```js +{ + errors: [ + { message: 'PersistedQueryNotFound' } + ] +} +``` + +If the backend doesn't support Automatic Persisted Queries, or does not want to support it for that particular client, it can send back the following which will tell the client to stop trying to send hashes: + +``` +{ + errors: [ + { message: 'PersistedQueryNotSupported' } + ] +} +``` + +**Negotiation Protocol** + +In order to support Automatic Persisted Queries, the client and server must follow the negotiation steps as outlined here: + +*Happy Path* +1. Client sends query signature with no `query` field +2. Server looks up query based on hash, if found, it resolves the data +3. Client receives data and completes request + +*Missing hash path* +1. Client sends query signature with no `query` field +2. Server looks up query based on hash, none is found +3. Server responds with NotFound error response +4. Client sends both hash and query string to Server +5. Server fulfills response and saves query string + hash for future lookup +6. Client receives data and completes request + +### Build time generation + +If you want to avoid hashing in the browser, you can use a build script to include the hash as part of the request, then pass a function to retrieve that hash when the operation is run. 
This works well with projects like [GraphQL Persisted Document Loader](https://github.com/leoasis/graphql-persisted-document-loader) which uses webpack to generate hashes at build time. + +If you use the above loader, you can pass `{ generateHash: ({ documentId }) => documentId }` to the `createPersistedQueryLink` call. diff --git a/docs/source/caching/cache-interaction.md b/docs/source/caching/cache-interaction.md index d81f9df8474..b2195f31340 100644 --- a/docs/source/caching/cache-interaction.md +++ b/docs/source/caching/cache-interaction.md @@ -316,6 +316,36 @@ cache.modify({ }); ``` +### Example: Invalidating fields within a cached object + +Normally, changing or deleting a field's value also _invalidates_ the field, causing watched queries to be reread if they previously consumed the field. + +Using `cache.modify`, it's also possible to invalidate the field without changing or deleting its value, by returning the `INVALIDATE` sentinel: + +```js +cache.modify({ + id: cache.identify(myPost), + fields: { + comments(existingCommentRefs, { INVALIDATE }) { + return INVALIDATE; + }, + }, +}); +``` + +If you need to invalidate all fields within the given object, you can pass a modifier function as the value of the `fields` option: + +```js +cache.modify({ + id: cache.identify(myPost), + fields(fieldValue, details) { + return details.INVALIDATE; + }, +}); +``` + +When using this form of `cache.modify`, you can determine the individual field names using `details.fieldName`. This technique works for any modifier function, not just those that return `INVALIDATE`. + ## Obtaining an object's custom ID If a type in your cache uses a [custom identifier](./cache-configuration/#customizing-identifier-generation-by-type) (or even if it doesn't), you can use the `cache.identify` method to obtain the identifier for an object of that type. This method takes an object and computes its ID based on both its `__typename` and its identifier field(s). 
This means you don't have to keep track of which fields make up each type's identifier. diff --git a/docs/source/local-state/managing-state-with-field-policies.mdx b/docs/source/local-state/managing-state-with-field-policies.mdx index a337fef7882..7951a8b9f26 100644 --- a/docs/source/local-state/managing-state-with-field-policies.mdx +++ b/docs/source/local-state/managing-state-with-field-policies.mdx @@ -147,15 +147,14 @@ This `read` function returns the value of our reactive variable whenever `cartIt Now, let's create a button component that enables the user to add a product to their cart: -```jsx{8}:title=AddToCartButton.js +```jsx{7}:title=AddToCartButton.js import { cartItemsVar } from './cache'; // ... other imports export function AddToCartButton({ productId }) { - const cartItems = cartItemsVar(); return (
-
@@ -167,8 +166,6 @@ On click, this button updates the value of `cartItemsVar` to append the button's Here's a `Cart` component that uses the `GET_CART_ITEMS` query and therefore refreshes automatically whenever the value of `cartItemsVar` changes: - - ```jsx:title=Cart.js export const GET_CART_ITEMS = gql` query GetCartItems { @@ -199,7 +196,32 @@ export function Cart() { } ``` - +Alternatively, you can read directly from a reactive variable using the `useReactiveVar` hook introduced in Apollo Client 3.2.0: + +```jsx:title=Cart.js +import { useReactiveVar } from '@apollo/client'; + +export function Cart() { + const cartItems = useReactiveVar(cartItemsVar); + + return ( +
+
My Cart
+ {cartItems.length === 0 ? ( +

No items in your cart

+ ) : ( + + {cartItems.map(productId => ( + + ))} + + )} +
+ ); +} +``` + +As in the earlier `useQuery` example, whenever the `cartItemsVar` variable is updated, any currently-mounted `Cart` components will rerender. Calling `cartItemsVar()` without `useReactiveVar` will not capture this dependency, so future variable updates will not rerender the component. Both of these approaches are useful in different situations. ### Storing local state in the cache diff --git a/package-lock.json b/package-lock.json index fa8be92613d..7896a55e987 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "@apollo/client", - "version": "3.1.5", + "version": "3.2.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -1345,6 +1345,11 @@ } } }, + "@graphql-typed-document-node/core": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.1.0.tgz", + "integrity": "sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg==" + }, "@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -2321,9 +2326,9 @@ } }, "@types/babel__traverse": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.0.13.tgz", - "integrity": "sha512-i+zS7t6/s9cdQvbqKDARrcbrPvtJGlbYsMkazo03nTAK3RX9FNrLllXys22uiTGJapPOTZTQ35nHh4ISph4SLQ==", + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.0.14.tgz", + "integrity": "sha512-8w9szzKs14ZtBVuP6Wn7nMLRJ0D6dfB0VEBEyRgxrZ/Ln49aNMykrghM2FaNn4FJRzNppCSa0Rv9pBRM5Xc3wg==", "dev": true, "requires": { "@babel/types": "^7.3.0" @@ -2508,9 +2513,9 @@ "dev": true }, "@types/prettier": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.0.2.tgz", - "integrity": "sha512-IkVfat549ggtkZUthUzEX49562eGikhSYeVGX97SkMFn+sTZrgRewXjQ4tPKFPCykZHkX1Zfd9OoELGqKU2jJA==", + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.1.0.tgz", + "integrity": "sha512-hiYA88aHiEIgDmeKlsyVsuQdcFn3Z2VuFd/Xm/HCnGnPD8UFU5BM128uzzRVVGEzKDKYUrRsRH9S2o+NUy/3IA==", "dev": true }, "@types/prop-types": { @@ -3752,6 +3757,12 @@ "which": "^1.2.9" } }, + "crypto-hash": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/crypto-hash/-/crypto-hash-1.3.0.tgz", + "integrity": "sha512-lyAZ0EMyjDkVvz8WOeVnuCPvKVBXcMv1l5SVqO1yC7PzTwrD/pPje/BIRbWhMoPe436U+Y2nD7f5bFx0kt+Sbg==", + "dev": true + }, "cssom": { "version": "0.4.4", "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz", @@ -3821,20 +3832,20 @@ } }, "webidl-conversions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", - "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz", + "integrity": "sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==", "dev": true }, "whatwg-url": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.1.0.tgz", - "integrity": "sha512-vEIkwNi9Hqt4TV9RdnaBPNt+E2Sgmo3gePebCRgZ1R7g6d23+53zCTnuB0amKI4AXq6VM8jj2DUAa0S1vjJxkw==", + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.2.2.tgz", + "integrity": "sha512-PcVnO6NiewhkmzV0qn7A+UZ9Xx4maNTI+O+TShmfE4pqjoCMwUMjkvoNhNHPTvgR7QH9Xt3R13iHuWy2sToFxQ==", "dev": true, "requires": { "lodash.sortby": "^4.7.0", "tr46": "^2.0.2", - "webidl-conversions": "^5.0.0" + "webidl-conversions": "^6.1.0" } } } @@ -6168,6 +6179,16 @@ } } }, + "jest-fetch-mock": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/jest-fetch-mock/-/jest-fetch-mock-3.0.3.tgz", + "integrity": 
"sha512-Ux1nWprtLrdrH4XwE7O7InRY6psIi3GOsqNESJgMJ+M5cv4A8Lh7SN9d2V2kKRZ8ebAfcd1LNyZguAOb6JiDqw==", + "dev": true, + "requires": { + "cross-fetch": "^3.0.4", + "promise-polyfill": "^8.1.3" + } + }, "jest-get-type": { "version": "25.2.6", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-25.2.6.tgz", @@ -7678,22 +7699,14 @@ "dev": true }, "whatwg-url": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.1.0.tgz", - "integrity": "sha512-vEIkwNi9Hqt4TV9RdnaBPNt+E2Sgmo3gePebCRgZ1R7g6d23+53zCTnuB0amKI4AXq6VM8jj2DUAa0S1vjJxkw==", + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.2.2.tgz", + "integrity": "sha512-PcVnO6NiewhkmzV0qn7A+UZ9Xx4maNTI+O+TShmfE4pqjoCMwUMjkvoNhNHPTvgR7QH9Xt3R13iHuWy2sToFxQ==", "dev": true, "requires": { "lodash.sortby": "^4.7.0", "tr46": "^2.0.2", - "webidl-conversions": "^5.0.0" - }, - "dependencies": { - "webidl-conversions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", - "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", - "dev": true - } + "webidl-conversions": "^6.1.0" } } } @@ -7710,6 +7723,12 @@ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", "dev": true }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -8633,6 +8652,12 @@ "asap": "~2.0.3" } }, + "promise-polyfill": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/promise-polyfill/-/promise-polyfill-8.1.3.tgz", 
+ "integrity": "sha512-MG5r82wBzh7pSKDRa9y+vllNHz3e3d4CNj1PQE4BQYxLme0gKYYBm9YENq+UkEikyZ0XbiGWxYlVw3Rl9O/U8g==", + "dev": true + }, "prompts": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.3.2.tgz", @@ -8764,14 +8789,14 @@ }, "dependencies": { "parse-json": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.1.tgz", - "integrity": "sha512-ztoZ4/DYeXQq4E21v169sC8qWINGpcosGv9XhTDvg9/hWvx/zrFkc9BiWxR58OJLHGk28j5BL0SDLeV2WmFZlQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.1.0.tgz", + "integrity": "sha512-+mi/lmVVNKFNVyLXV31ERiy2CY5E1/F6QtJFEzoChPRwwngMNXRDQ9GJ5WdE2Z2P4AujsOi0/+2qHID68KwfIQ==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1", + "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" } }, @@ -9439,54 +9464,6 @@ "jest-worker": "^26.2.1", "serialize-javascript": "^4.0.0", "terser": "^5.0.0" - }, - "dependencies": { - "@babel/code-frame": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz", - "integrity": "sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==", - "dev": true, - "requires": { - "@babel/highlight": "^7.10.4" - } - }, - "@babel/highlight": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.4.tgz", - "integrity": "sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==", - "dev": true, - "requires": { - "@babel/helper-validator-identifier": "^7.10.4", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": 
{ - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } } }, "rsvp": { @@ -9985,9 +9962,9 @@ } }, "source-map-support": { - "version": "0.5.16", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", - "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" @@ -10328,9 +10305,9 @@ } }, "terser": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.2.0.tgz", - "integrity": "sha512-nZ9TWhBznZdlww3borgJyfQDrxzpgd0RlRNoxR63tMVry01lIH/zKQDTTiaWRMGowydfvSHMgyiGyn6A9PSkCQ==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.2.1.tgz", + "integrity": "sha512-/AOtjRtAMNGO0fIF6m8HfcvXTw/2AKpsOzDn36tA5RfhRdeXyb4RvHxJ5Pah7iL6dFkLk+gOnCaNHGwJPl6TrQ==", "requires": { "commander": "^2.20.0", "source-map": "~0.6.1", @@ -10693,9 +10670,9 @@ } }, "uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "version": "4.4.0", + "resolved": 
"https://registry.npmjs.org/uri-js/-/uri-js-4.4.0.tgz", + "integrity": "sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g==", "dev": true, "requires": { "punycode": "^2.1.0" @@ -11114,14 +11091,6 @@ "requires": { "camelcase": "^5.0.0", "decamelize": "^1.2.0" - }, - "dependencies": { - "decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true - } } }, "yn": { diff --git a/package.json b/package.json index f7e2a1c5d77..d24a4de6eaa 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@apollo/client", - "version": "3.1.5", + "version": "3.2.0", "description": "A fully-featured caching GraphQL client.", "private": true, "keywords": [ @@ -53,7 +53,7 @@ { "name": "apollo-client", "path": "./dist/apollo-client.cjs.min.js", - "maxSize": "24.5 kB" + "maxSize": "24.7 kB" } ], "peerDependencies": { @@ -67,6 +67,7 @@ } }, "dependencies": { + "@graphql-typed-document-node/core": "^3.0.0", "@types/zen-observable": "^0.8.0", "@wry/context": "^0.5.2", "@wry/equality": "^0.2.0", @@ -97,11 +98,13 @@ "@types/recompose": "^0.30.7", "bundlesize": "0.18.0", "cross-fetch": "3.0.6", + "crypto-hash": "^1.3.0", "fetch-mock": "7.7.3", "glob": "7.1.6", "graphql": "15.3.0", "graphql-tools": "^6.0.12", "jest": "26.4.2", + "jest-fetch-mock": "^3.0.3", "jest-junit": "11.1.0", "lodash": "4.17.20", "react": "^16.13.1", diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts index d73d0a31c0b..2f8a1870f18 100644 --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -13,6 +13,7 @@ import { ApolloLink } from '../link/core'; import { HttpLink } from '../link/http'; import { InMemoryCache } from '../cache'; import { stripSymbols } from '../testing'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; describe('ApolloClient', () => { describe('constructor', () => 
{ @@ -1210,6 +1211,22 @@ describe('ApolloClient', () => { } describe('using writeQuery', () => { + it('with TypedDocumentNode', async () => { + const client = newClient(); + + // This is defined manually for the purpose of the test, but + // eventually this could be generated with graphql-code-generator + const typedQuery: TypedDocumentNode = query; + + // The result and variables are being typed automatically, based on the query object we pass, + // and type inference is done based on the TypeDocumentNode object. + const result = await client.query({ query: typedQuery, variables: { testVar: 'foo' } }); + + // Just try to access it, if something will break, TS will throw an error + // during the test + result.data?.people.friends[0].id; + }); + it('with a replacement of nested array (wq)', done => { let count = 0; const client = newClient(); diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap index 91326b9ce76..b1cf1084965 100644 --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -52,6 +52,7 @@ Array [ "useLazyQuery", "useMutation", "useQuery", + "useReactiveVar", "useSubscription", ] `; @@ -173,6 +174,13 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/link/persisted-queries 1`] = ` +Array [ + "VERSION", + "createPersistedQueryLink", +] +`; + exports[`exports of public entry points @apollo/client/link/retry 1`] = ` Array [ "RetryLink", @@ -216,6 +224,7 @@ Array [ "useLazyQuery", "useMutation", "useQuery", + "useReactiveVar", "useSubscription", ] `; @@ -262,6 +271,7 @@ Array [ "useLazyQuery", "useMutation", "useQuery", + "useReactiveVar", "useSubscription", ] `; diff --git a/src/__tests__/__snapshots__/mutationResults.ts.snap b/src/__tests__/__snapshots__/mutationResults.ts.snap new file mode 100644 index 00000000000..ebcfff7bb25 --- /dev/null +++ b/src/__tests__/__snapshots__/mutationResults.ts.snap @@ -0,0 +1,40 @@ +// Jest Snapshot v1, 
https://goo.gl/fbAQLP + +exports[`mutation results should write results to cache according to errorPolicy 1`] = `Object {}`; + +exports[`mutation results should write results to cache according to errorPolicy 2`] = ` +Object { + "Person:{\\"name\\":\\"Jenn Creighton\\"}": Object { + "__typename": "Person", + "name": "Jenn Creighton", + }, + "ROOT_MUTATION": Object { + "__typename": "Mutation", + "newPerson({\\"name\\":\\"Jenn Creighton\\"})": Object { + "__ref": "Person:{\\"name\\":\\"Jenn Creighton\\"}", + }, + }, +} +`; + +exports[`mutation results should write results to cache according to errorPolicy 3`] = ` +Object { + "Person:{\\"name\\":\\"Ellen Shapiro\\"}": Object { + "__typename": "Person", + "name": "Ellen Shapiro", + }, + "Person:{\\"name\\":\\"Jenn Creighton\\"}": Object { + "__typename": "Person", + "name": "Jenn Creighton", + }, + "ROOT_MUTATION": Object { + "__typename": "Mutation", + "newPerson({\\"name\\":\\"Ellen Shapiro\\"})": Object { + "__ref": "Person:{\\"name\\":\\"Ellen Shapiro\\"}", + }, + "newPerson({\\"name\\":\\"Jenn Creighton\\"})": Object { + "__ref": "Person:{\\"name\\":\\"Jenn Creighton\\"}", + }, + }, +} +`; diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts index 66653ce4319..f015f25f7bf 100644 --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -3172,6 +3172,96 @@ describe('@connection', () => { }); }); + itAsync('allows setting nextFetchPolicy in defaultOptions', (resolve, reject) => { + let networkCounter = 0; + let nextFetchPolicyCallCount = 0; + + const client = new ApolloClient({ + link: new ApolloLink(operation => new Observable(observer => { + observer.next({ + data: { + count: networkCounter++, + }, + }); + observer.complete(); + })), + + cache: new InMemoryCache, + + defaultOptions: { + watchQuery: { + nextFetchPolicy(fetchPolicy) { + expect(++nextFetchPolicyCallCount).toBe(1); + expect(this.query).toBe(query); + expect(fetchPolicy).toBe("cache-first"); + // Usually options.nextFetchPolicy applies 
only once, but a + // nextFetchPolicy function can set this.nextFetchPolicy + // again to perform an additional transition. + this.nextFetchPolicy = fetchPolicy => { + ++nextFetchPolicyCallCount; + expect(fetchPolicy).toBe("cache-and-network"); + return "cache-first"; + }; + return "cache-and-network"; + }, + }, + }, + }); + + const query = gql` + query { + count + } + `; + + client.writeQuery({ + query, + data: { + count: "initial", + }, + }); + + const obs = client.watchQuery({ query }); + + subscribeAndCount(reject, obs, (handleCount, result) => { + if (handleCount === 1) { + expect(nextFetchPolicyCallCount).toBe(1); + expect(result.data).toEqual({ count: "initial" }); + // Refetching makes a copy of the current options, which + // includes options.nextFetchPolicy, so the inner + // nextFetchPolicy function ends up getting called twice. + obs.refetch(); + } else if (handleCount === 2) { + expect(result.data).toEqual({ count: "initial" }); + expect(nextFetchPolicyCallCount).toBe(2); + } else if (handleCount === 3) { + expect(result.data).toEqual({ count: 0 }); + expect(nextFetchPolicyCallCount).toBe(2); + client.writeQuery({ + query, + data: { + count: "secondary", + }, + }); + } else if (handleCount === 4) { + expect(result.data).toEqual({ count: "secondary" }); + expect(nextFetchPolicyCallCount).toBe(3); + } else if (handleCount === 5) { + expect(result.data).toEqual({ count: 1 }); + expect(nextFetchPolicyCallCount).toBe(3); + client.cache.evict({ fieldName: "count" }); + } else if (handleCount === 6) { + expect(result.data).toEqual({ count: 2 }); + expect(nextFetchPolicyCallCount).toBe(3); + expect(obs.options.fetchPolicy).toBe("cache-first"); + expect(obs.options.nextFetchPolicy).toBeUndefined(); + setTimeout(resolve, 50); + } else { + reject("too many results"); + } + }); + }); + itAsync('allows setting default options for query', (resolve, reject) => { const errors = [{ message: 'failure', name: 'failure' }]; const link = mockSingleLink({ diff --git 
a/src/__tests__/exports.ts b/src/__tests__/exports.ts index 5f896488d05..13e00747a14 100644 --- a/src/__tests__/exports.ts +++ b/src/__tests__/exports.ts @@ -8,6 +8,7 @@ import * as linkContext from "../link/context"; import * as linkCore from "../link/core"; import * as linkError from "../link/error"; import * as linkHTTP from "../link/http"; +import * as linkPersistedQueries from "../link/persisted-queries"; import * as linkRetry from "../link/retry"; import * as linkSchema from "../link/schema"; import * as linkUtils from "../link/utils"; @@ -47,6 +48,7 @@ describe('exports of public entry points', () => { check("@apollo/client/link/core", linkCore); check("@apollo/client/link/error", linkError); check("@apollo/client/link/http", linkHTTP); + check("@apollo/client/link/persisted-queries", linkPersistedQueries); check("@apollo/client/link/retry", linkRetry); check("@apollo/client/link/schema", linkSchema); check("@apollo/client/link/utils", linkUtils); diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts index 9d87841f9fe..ed36c587cba 100644 --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -1,5 +1,6 @@ import { cloneDeep } from 'lodash'; import gql from 'graphql-tag'; +import { GraphQLError } from 'graphql'; import { ApolloClient } from '../core'; import { InMemoryCache } from '../cache'; @@ -306,6 +307,99 @@ describe('mutation results', () => { }); }); + itAsync("should write results to cache according to errorPolicy", async (resolve, reject) => { + const expectedFakeError = new GraphQLError("expected/fake error"); + + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Person: { + keyFields: ["name"], + }, + }, + }), + + link: new ApolloLink(operation => new Observable(observer => { + observer.next({ + errors: [ + expectedFakeError, + ], + data: { + newPerson: { + __typename: "Person", + name: operation.variables.newName, + }, + }, + }); + observer.complete(); + 
})).setOnError(reject), + }); + + const mutation = gql` + mutation AddNewPerson($newName: String!) { + newPerson(name: $newName) { + name + } + } + `; + + await client.mutate({ + mutation, + variables: { + newName: "Hugh Willson", + }, + }).then(() => { + reject("should have thrown for default errorPolicy"); + }, error => { + expect(error.message).toBe(expectedFakeError.message); + }); + + expect(client.cache.extract()).toMatchSnapshot(); + + const ignoreErrorsResult = await client.mutate({ + mutation, + errorPolicy: "ignore", + variables: { + newName: "Jenn Creighton", + }, + }); + + expect(ignoreErrorsResult).toEqual({ + data: { + newPerson: { + __typename: "Person", + name: "Jenn Creighton", + }, + }, + }); + + expect(client.cache.extract()).toMatchSnapshot(); + + const allErrorsResult = await client.mutate({ + mutation, + errorPolicy: "all", + variables: { + newName: "Ellen Shapiro", + }, + }); + + expect(allErrorsResult).toEqual({ + data: { + newPerson: { + __typename: "Person", + name: "Ellen Shapiro", + }, + }, + errors: [ + expectedFakeError, + ], + }); + + expect(client.cache.extract()).toMatchSnapshot(); + + resolve(); + }); + itAsync("should warn when the result fields don't match the query fields", (resolve, reject) => { let handle: any; let subscriptionHandle: Subscription; diff --git a/src/__tests__/optimistic.ts b/src/__tests__/optimistic.ts index 956b14d8da5..28a7e82d7e4 100644 --- a/src/__tests__/optimistic.ts +++ b/src/__tests__/optimistic.ts @@ -1334,7 +1334,7 @@ describe('optimistic mutation results', () => { client.watchQuery({ query }) as any as ObservableInput, ).pipe( map(value => stripSymbols(value.data.todoList.todos)), - take(4), + take(5), toArray(), ).toPromise(); @@ -1360,6 +1360,10 @@ describe('optimistic mutation results', () => { expect(responses).toEqual([ defaultTodos, + [ + customOptimisticResponse1.createTodo, + ...defaultTodos, + ], [ customOptimisticResponse2.createTodo, customOptimisticResponse1.createTodo, @@ -1811,7 +1815,7 
@@ describe('optimistic mutation results', () => { client.watchQuery({ query }) as any as ObservableInput, ).pipe( map(value => stripSymbols(value.data.todoList.todos)), - take(4), + take(5), toArray(), ).toPromise(); @@ -1833,6 +1837,10 @@ describe('optimistic mutation results', () => { const defaultTodos = stripSymbols(result.data.todoList.todos); expect(responses).toEqual([ defaultTodos, + [ + customOptimisticResponse1.createTodo, + ...defaultTodos, + ], [ customOptimisticResponse2.createTodo, customOptimisticResponse1.createTodo, @@ -1937,6 +1945,7 @@ describe('optimistic mutation results', () => { expect(optimisticDiffs).toEqual([ { complete: true, + fromOptimisticTransaction: true, result: { items: manualItems, }, @@ -2099,12 +2108,14 @@ describe('optimistic mutation results', () => { expect(optimisticDiffs).toEqual([ { complete: true, + fromOptimisticTransaction: true, result: { items: manualItems, }, }, { complete: true, + fromOptimisticTransaction: true, result: { items: [...manualItems, optimisticItem], }, diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts index 6babc4aeb58..19eb8dd6127 100644 --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -15,7 +15,7 @@ export abstract class ApolloCache implements DataProxy { // required to implement // core API public abstract read( - query: Cache.ReadOptions, + query: Cache.ReadOptions, ): T | null; public abstract write( write: Cache.WriteOptions, @@ -104,7 +104,7 @@ export abstract class ApolloCache implements DataProxy { * @param optimistic */ public readQuery( - options: DataProxy.Query, + options: DataProxy.Query, optimistic: boolean = false, ): QueryType | null { return this.read({ @@ -120,7 +120,7 @@ export abstract class ApolloCache implements DataProxy { private getFragmentDoc = wrap(getFragmentQueryDocument); public readFragment( - options: DataProxy.Fragment, + options: DataProxy.Fragment, optimistic: boolean = false, ): FragmentType | null { return this.read({ diff --git 
a/src/cache/core/types/Cache.ts b/src/cache/core/types/Cache.ts index 21c2535538e..8a9af7c3ff1 100644 --- a/src/cache/core/types/Cache.ts +++ b/src/cache/core/types/Cache.ts @@ -4,15 +4,15 @@ import { Modifier, Modifiers } from './common'; export namespace Cache { export type WatchCallback = (diff: Cache.DiffResult) => void; - export interface ReadOptions - extends DataProxy.Query { + export interface ReadOptions + extends DataProxy.Query { rootId?: string; previousResult?: any; optimistic: boolean; } export interface WriteOptions - extends DataProxy.Query { + extends DataProxy.Query { dataId?: string; result: TResult; broadcast?: boolean; diff --git a/src/cache/core/types/DataProxy.ts b/src/cache/core/types/DataProxy.ts index 2e02ae6c42e..a256829d09f 100644 --- a/src/cache/core/types/DataProxy.ts +++ b/src/cache/core/types/DataProxy.ts @@ -1,15 +1,16 @@ import { DocumentNode } from 'graphql'; // eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { MissingFieldError } from './common'; export namespace DataProxy { - export interface Query { + export interface Query { /** * The GraphQL query shape to be used constructed using the `gql` template * string tag from `graphql-tag`. The query will be used to determine the * shape of the data to be read. */ - query: DocumentNode; + query: DocumentNode | TypedDocumentNode; /** * Any variables that the GraphQL query may depend on. @@ -24,7 +25,7 @@ export namespace DataProxy { id?: string; } - export interface Fragment { + export interface Fragment { /** * The root id to be used. This id should take the same form as the * value returned by your `dataIdFromObject` function. If a value with your @@ -38,7 +39,7 @@ export namespace DataProxy { * the shape of data to read. If you provide more than one fragment in this * document then you must also specify `fragmentName` to select a single. 
*/ - fragment: DocumentNode; + fragment: DocumentNode | TypedDocumentNode; /** * The name of the fragment in your GraphQL document to be used. If you do @@ -54,7 +55,7 @@ export namespace DataProxy { } export interface WriteQueryOptions - extends Query { + extends Query { /** * The data you will be writing to the store. */ @@ -66,7 +67,7 @@ export namespace DataProxy { } export interface WriteFragmentOptions - extends Fragment { + extends Fragment { /** * The data you will be writing to the store. */ @@ -81,6 +82,7 @@ export namespace DataProxy { result?: T; complete?: boolean; missing?: MissingFieldError[]; + fromOptimisticTransaction?: boolean; } } @@ -95,7 +97,7 @@ export interface DataProxy { * Reads a GraphQL query from the root query id. */ readQuery( - options: DataProxy.Query, + options: DataProxy.Query, optimistic?: boolean, ): QueryType | null; @@ -105,7 +107,7 @@ export interface DataProxy { * provided to select the correct fragment. */ readFragment( - options: DataProxy.Fragment, + options: DataProxy.Fragment, optimistic?: boolean, ): FragmentType | null; diff --git a/src/cache/core/types/common.ts b/src/cache/core/types/common.ts index 7d58ebc3769..87fb2c066a5 100644 --- a/src/cache/core/types/common.ts +++ b/src/cache/core/types/common.ts @@ -7,6 +7,8 @@ import { isReference, } from '../../../utilities'; +import { StorageType } from '../../inmemory/policies'; + // The Readonly type only really works for object types, since it marks // all of the object's properties as readonly, but there are many cases when // a generic type parameter like TExisting might be a string or some other @@ -21,6 +23,7 @@ export class MissingFieldError { public readonly message: string, public readonly path: (string | number)[], public readonly query: import('graphql').DocumentNode, + public readonly clientOnly: boolean, public readonly variables?: Record, ) {} } @@ -54,12 +57,14 @@ export type CanReadFunction = (value: StoreValue) => boolean; export type Modifier = (value: 
T, details: { DELETE: any; + INVALIDATE: any; fieldName: string; storeFieldName: string; readField: ReadFieldFunction; canRead: CanReadFunction; isReference: typeof isReference; toReference: ToReferenceFunction; + storage: StorageType; }) => T; export type Modifiers = { diff --git a/src/cache/inmemory/__tests__/__snapshots__/cache.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/cache.ts.snap index b03d1ba007a..b436838194b 100644 --- a/src/cache/inmemory/__tests__/__snapshots__/cache.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/cache.ts.snap @@ -227,3 +227,84 @@ Object { }, } `; + +exports[`InMemoryCache#modify should allow invalidation using details.INVALIDATE 1`] = ` +Object { + "Author:{\\"name\\":\\"Maria Dahvana Headley\\"}": Object { + "__typename": "Author", + "name": "Maria Dahvana Headley", + }, + "Book:{\\"isbn\\":\\"0374110034\\"}": Object { + "__typename": "Book", + "author": Object { + "__ref": "Author:{\\"name\\":\\"Maria Dahvana Headley\\"}", + }, + "isbn": "0374110034", + "title": "Beowulf: A New Translation", + }, + "ROOT_QUERY": Object { + "__typename": "Query", + "currentlyReading": Object { + "__ref": "Book:{\\"isbn\\":\\"0374110034\\"}", + }, + }, +} +`; + +exports[`TypedDocumentNode should determine Data and Variables types of {write,read}{Query,Fragment} 1`] = ` +Object { + "Author:{\\"name\\":\\"John C. Mitchell\\"}": Object { + "__typename": "Author", + "name": "John C. Mitchell", + }, + "Book:{\\"isbn\\":\\"0262133210\\"}": Object { + "__typename": "Book", + "author": Object { + "__ref": "Author:{\\"name\\":\\"John C. 
Mitchell\\"}", + }, + "isbn": "0262133210", + "title": "Foundations for Programming Languages", + }, + "ROOT_QUERY": Object { + "__typename": "Query", + "book({\\"isbn\\":\\"0262133210\\"})": Object { + "__ref": "Book:{\\"isbn\\":\\"0262133210\\"}", + }, + }, +} +`; + +exports[`TypedDocumentNode should determine Data and Variables types of {write,read}{Query,Fragment} 2`] = ` +Object { + "Author:{\\"name\\":\\"Harold Abelson\\"}": Object { + "__typename": "Author", + "name": "Harold Abelson", + }, + "Author:{\\"name\\":\\"John C. Mitchell\\"}": Object { + "__typename": "Author", + "name": "John C. Mitchell", + }, + "Book:{\\"isbn\\":\\"0262133210\\"}": Object { + "__typename": "Book", + "author": Object { + "__ref": "Author:{\\"name\\":\\"John C. Mitchell\\"}", + }, + "isbn": "0262133210", + "title": "Foundations for Programming Languages", + }, + "Book:{\\"isbn\\":\\"0262510871\\"}": Object { + "__typename": "Book", + "author": Object { + "__ref": "Author:{\\"name\\":\\"Harold Abelson\\"}", + }, + "isbn": "0262510871", + "title": "Structure and Interpretation of Computer Programs", + }, + "ROOT_QUERY": Object { + "__typename": "Query", + "book({\\"isbn\\":\\"0262133210\\"})": Object { + "__ref": "Book:{\\"isbn\\":\\"0262133210\\"}", + }, + }, +} +`; diff --git a/src/cache/inmemory/__tests__/__snapshots__/fragmentMatcher.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/fragmentMatcher.ts.snap new file mode 100644 index 00000000000..0b0c04b9abf --- /dev/null +++ b/src/cache/inmemory/__tests__/__snapshots__/fragmentMatcher.ts.snap @@ -0,0 +1,29 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`policies.fragmentMatches can infer fuzzy subtypes heuristically 1`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "objects": Array [ + Object { + "__typename": "E", + "c": "ce", + }, + Object { + "__typename": "F", + "c": "cf", + }, + Object { + "__typename": "G", + "c": "cg", + }, + Object { + "__typename": "TooLong", + }, + Object { + 
"__typename": "H", + }, + ], + }, +} +`; diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap index afba80cb302..f26a0b9f885 100644 --- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap @@ -830,3 +830,25 @@ Object { }, } `; + +exports[`type policies field policies read, merge, and modify functions can access options.storage 1`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "mergeModify": 11, + "mergeRead": 1, + "mergeReadModify": 101, + }, +} +`; + +exports[`type policies field policies read, merge, and modify functions can access options.storage 2`] = ` +Object { + "ROOT_QUERY": Object { + "__typename": "Query", + "mergeModify": 11, + "mergeRead": 1, + "mergeReadModify": 101, + }, +} +`; diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts index f877e68ba66..c5b707d6ffc 100644 --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -2,7 +2,8 @@ import gql, { disableFragmentWarnings } from 'graphql-tag'; import { stripSymbols } from '../../../utilities/testing/stripSymbols'; import { cloneDeep } from '../../../utilities/common/cloneDeep'; -import { makeReference, Reference, makeVar } from '../../../core'; +import { makeReference, Reference, makeVar, TypedDocumentNode, isReference } from '../../../core'; +import { Cache } from '../../../cache'; import { InMemoryCache, InMemoryCacheConfig } from '../inMemoryCache'; disableFragmentWarnings(); @@ -1644,6 +1645,104 @@ describe("InMemoryCache#modify", () => { expect(resultAfterModify).toEqual({ a: 1, b: -1, c: 0 }); }); + it("should allow invalidation using details.INVALIDATE", () => { + const cache = new InMemoryCache({ + typePolicies: { + Book: { + keyFields: ["isbn"], + }, + Author: { + keyFields: ["name"], + }, + }, + }); + + const query: TypedDocumentNode<{ + 
currentlyReading: { + title: string; + isbn: string; + author: { + name: string; + }, + }, + }> = gql` + query { + currentlyReading { + title + isbn + author { + name + } + } + } + `; + + const currentlyReading = { + __typename: "Book", + isbn: "0374110034", + title: "Beowulf: A New Translation", + author: { + __typename: "Author", + name: "Maria Dahvana Headley", + }, + }; + + cache.writeQuery({ + query, + data: { + currentlyReading, + } + }); + + function read() { + return cache.readQuery({ query })!; + } + + const initialResult = read(); + + expect(cache.extract()).toMatchSnapshot(); + + expect(cache.modify({ + id: cache.identify({ + __typename: "Author", + name: "Maria Dahvana Headley", + }), + fields: { + name(_, { INVALIDATE }) { + return INVALIDATE; + }, + }, + })).toBe(false); // Nothing actually modified. + + const resultAfterAuthorInvalidation = read(); + expect(resultAfterAuthorInvalidation).not.toBe(initialResult); + expect(resultAfterAuthorInvalidation).toEqual(initialResult); + + expect(cache.modify({ + id: cache.identify({ + __typename: "Book", + isbn: "0374110034", + }), + // Invalidate all fields of the Book entity. + fields(_, { INVALIDATE }) { + return INVALIDATE; + }, + })).toBe(false); // Nothing actually modified. 
+ + const resultAfterBookInvalidation = read(); + expect(resultAfterBookInvalidation).not.toBe(resultAfterAuthorInvalidation); + expect(resultAfterBookInvalidation).toEqual(resultAfterAuthorInvalidation); + expect(resultAfterBookInvalidation.currentlyReading.author).toEqual({ + __typename: "Author", + name: "Maria Dahvana Headley", + }); + expect( + resultAfterBookInvalidation.currentlyReading.author + ).toBe( + resultAfterAuthorInvalidation.currentlyReading.author + ); + }); + it("should allow deletion using details.DELETE", () => { const cache = new InMemoryCache({ typePolicies: { @@ -2495,4 +2594,259 @@ describe("ReactiveVar and makeVar", () => { }, }); }); + + it("should broadcast only once for multiple reads of same variable", () => { + const nameVar = makeVar("Ben"); + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + name() { + return nameVar(); + }, + }, + }, + }, + }); + + // TODO This should not be necessary, but cache.readQuery currently + // returns null if we read a query before writing any queries. 
+ cache.restore({ + ROOT_QUERY: {} + }); + + const broadcast = cache["broadcastWatches"]; + let broadcastCount = 0; + cache["broadcastWatches"] = function () { + ++broadcastCount; + return broadcast.apply(this, arguments); + }; + + const query = gql` + query { + name1: name + name2: name + } + `; + + const watchDiffs: Cache.DiffResult[] = []; + cache.watch({ + query, + optimistic: true, + callback(diff) { + watchDiffs.push(diff); + }, + }); + + const benResult = cache.readQuery({ query }); + expect(benResult).toEqual({ + name1: "Ben", + name2: "Ben", + }); + + expect(watchDiffs).toEqual([]); + + expect(broadcastCount).toBe(0); + nameVar("Jenn"); + expect(broadcastCount).toBe(1); + + const jennResult = cache.readQuery({ query }); + expect(jennResult).toEqual({ + name1: "Jenn", + name2: "Jenn", + }); + + expect(watchDiffs).toEqual([ + { + complete: true, + result: { + name1: "Jenn", + name2: "Jenn", + }, + }, + ]); + + expect(broadcastCount).toBe(1); + nameVar("Hugh"); + expect(broadcastCount).toBe(2); + + const hughResult = cache.readQuery({ query }); + expect(hughResult).toEqual({ + name1: "Hugh", + name2: "Hugh", + }); + + expect(watchDiffs).toEqual([ + { + complete: true, + result: { + name1: "Jenn", + name2: "Jenn", + }, + }, + { + complete: true, + result: { + name1: "Hugh", + name2: "Hugh", + }, + }, + ]); + }); +}); + +describe('TypedDocumentNode', () => { + type Book = { + isbn?: string; + title: string; + author: { + name: string; + }; + }; + + const query: TypedDocumentNode< + { book: Book }, + { isbn: string } + > = gql`query GetBook($isbn: String!) 
{ + book(isbn: $isbn) { + title + author { + name + } + } + }`; + + const fragment: TypedDocumentNode = gql` + fragment TitleAndAuthor on Book { + title + isbn + author { + name + } + } + `; + + it('should determine Data and Variables types of {write,read}{Query,Fragment}', () => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + book(existing, { args, toReference }) { + return existing ?? (args && toReference({ + __typename: "Book", + isbn: args.isbn, + })); + } + } + }, + + Book: { + keyFields: ["isbn"], + }, + + Author: { + keyFields: ["name"], + }, + }, + }); + + // We need to define these objects separately from calling writeQuery, + // because passing them directly to writeQuery will trigger excess property + // warnings due to the extra __typename and isbn fields. Internally, we + // almost never pass object literals to writeQuery or writeFragment, so + // excess property checks should not be a problem in practice. + const jcmAuthor = { + __typename: "Author", + name: "John C. Mitchell", + }; + + const ffplBook = { + __typename: "Book", + isbn: "0262133210", + title: "Foundations for Programming Languages", + author: jcmAuthor, + }; + + const ffplVariables = { + isbn: "0262133210", + }; + + cache.writeQuery({ + query, + variables: ffplVariables, + data: { + book: ffplBook, + }, + }); + + expect(cache.extract()).toMatchSnapshot(); + + const ffplQueryResult = cache.readQuery({ + query, + variables: ffplVariables, + }); + + if (ffplQueryResult === null) throw new Error("null result"); + expect(ffplQueryResult.book.isbn).toBeUndefined(); + expect(ffplQueryResult.book.author.name).toBe(jcmAuthor.name); + expect(ffplQueryResult).toEqual({ + book: { + __typename: "Book", + title: "Foundations for Programming Languages", + author: { + __typename: "Author", + name: "John C. 
Mitchell", + }, + }, + }); + + const sicpBook = { + __typename: "Book", + isbn: "0262510871", + title: "Structure and Interpretation of Computer Programs", + author: { + __typename: "Author", + name: "Harold Abelson", + }, + }; + + const sicpRef = cache.writeFragment({ + fragment, + data: sicpBook, + }); + + expect(isReference(sicpRef)).toBe(true); + expect(cache.extract()).toMatchSnapshot(); + + const ffplFragmentResult = cache.readFragment({ + fragment, + id: cache.identify(ffplBook), + }); + if (ffplFragmentResult === null) throw new Error("null result"); + expect(ffplFragmentResult.title).toBe(ffplBook.title); + expect(ffplFragmentResult.author.name).toBe(ffplBook.author.name); + expect(ffplFragmentResult).toEqual(ffplBook); + + // This uses the read function for the Query.book field. + const sicpReadResult = cache.readQuery({ + query, + variables: { + isbn: sicpBook.isbn, + }, + }); + if (sicpReadResult === null) throw new Error("null result"); + expect(sicpReadResult.book.isbn).toBeUndefined(); + expect(sicpReadResult.book.title).toBe(sicpBook.title); + expect(sicpReadResult.book.author.name).toBe(sicpBook.author.name); + expect(sicpReadResult).toEqual({ + book: { + __typename: "Book", + title: "Structure and Interpretation of Computer Programs", + author: { + __typename: "Author", + name: "Harold Abelson", + }, + }, + }); + }); }); diff --git a/src/cache/inmemory/__tests__/entityStore.ts b/src/cache/inmemory/__tests__/entityStore.ts index 3d97772502c..052c05218ca 100644 --- a/src/cache/inmemory/__tests__/entityStore.ts +++ b/src/cache/inmemory/__tests__/entityStore.ts @@ -1091,14 +1091,16 @@ describe('EntityStore', () => { new MissingFieldError( 'Can\'t find field \'hobby\' on Author:{"name":"Ted Chiang"} object', ["authorOfBook", "hobby"], - expect.anything(), - expect.anything(), + expect.anything(), // query + false, // clientOnly + expect.anything(), // variables ), new MissingFieldError( 'Can\'t find field \'publisherOfBook\' on ROOT_QUERY object', 
["publisherOfBook"], - expect.anything(), - expect.anything(), + expect.anything(), // query + false, // clientOnly + expect.anything(), // variables ), ], }); @@ -1674,8 +1676,9 @@ describe('EntityStore', () => { new MissingFieldError( "Dangling reference to missing Author:2 object", ["book", "author"], - expect.anything(), - expect.anything(), + expect.anything(), // query + false, // clientOnly + expect.anything(), // variables ), ]; @@ -1943,8 +1946,9 @@ describe('EntityStore', () => { new MissingFieldError( 'Can\'t find field \'title\' on Book:{"isbn":"031648637X"} object', ["book", "title"], - expect.anything(), - expect.anything(), + expect.anything(), // query + false, // clientOnly + expect.anything(), // variables ), ], }); diff --git a/src/cache/inmemory/__tests__/fragmentMatcher.ts b/src/cache/inmemory/__tests__/fragmentMatcher.ts index 279a42ea902..d4062f64d97 100644 --- a/src/cache/inmemory/__tests__/fragmentMatcher.ts +++ b/src/cache/inmemory/__tests__/fragmentMatcher.ts @@ -1,6 +1,8 @@ import gql from 'graphql-tag'; import { InMemoryCache } from '../inMemoryCache'; +import { visit, FragmentDefinitionNode } from 'graphql'; +import { hasOwn } from '../helpers'; describe('fragment matching', () => { it('can match exact types with or without possibleTypes', () => { @@ -222,4 +224,340 @@ describe('fragment matching', () => { cache.writeQuery({ query, data }); expect(cache.readQuery({ query })).toEqual(data); }); + +}); + +describe("policies.fragmentMatches", () => { + const warnings: any[] = []; + const { warn } = console; + + beforeEach(() => { + warnings.length = 0; + console.warn = function (message: any) { + warnings.push(message); + }; + }); + + afterEach(() => { + console.warn = warn; + }); + + it("can infer fuzzy subtypes heuristically", () => { + const cache = new InMemoryCache({ + possibleTypes: { + A: ["B", "C"], + B: ["D"], + C: ["[E-Z]"], + }, + }); + + const fragments = gql` + fragment FragA on A { a } + fragment FragB on B { b } + fragment 
FragC on C { c } + fragment FragD on D { d } + fragment FragE on E { e } + fragment FragF on F { f } + `; + + function checkTypes( + expected: Record>, + ) { + const checked = new Set(); + + visit(fragments, { + FragmentDefinition(frag) { + function check(typename: string, result: boolean) { + if (result !== cache.policies.fragmentMatches(frag, typename)) { + fail(`fragment ${ + frag.name.value + } should${result ? "" : " not"} have matched typename ${typename}`); + } + } + + const supertype = frag.typeCondition.name.value; + expect("ABCDEF".split("")).toContain(supertype); + + if (hasOwn.call(expected, supertype)) { + Object.keys(expected[supertype]).forEach(subtype => { + check(subtype, expected[supertype][subtype]); + }); + + checked.add(frag); + } + }, + }); + + return checked; + } + + expect(checkTypes({ + A: { + A: true, + B: true, + C: true, + D: true, + E: false, + F: false, + G: false, + }, + B: { + A: false, + B: true, + C: false, + D: true, + E: false, + F: false, + G: false, + }, + C: { + A: false, + B: false, + C: true, + D: false, + E: false, + F: false, + G: false, + }, + D: { + A: false, + B: false, + C: false, + D: true, + E: false, + F: false, + G: false, + }, + E: { + A: false, + B: false, + C: false, + D: false, + E: true, + F: false, + G: false, + }, + F: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: true, + G: false, + }, + G: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: false, + G: true, + }, + }).size).toBe("ABCDEF".length); + + cache.writeQuery({ + query: gql` + query { + objects { + ...FragC + } + } + ${fragments} + `, + data: { + objects: [ + { __typename: "E", c: "ce" }, + { __typename: "F", c: "cf" }, + { __typename: "G", c: "cg" }, + // The /[E-Z]/ subtype pattern specified for the C supertype + // must match the entire subtype string. 
+ { __typename: "TooLong", c: "nope" }, + // The H typename matches the regular expression for C, but it + // does not pass the heuristic test of having all the fields + // expected if FragC matched. + { __typename: "H", h: "not c" }, + ], + }, + }); + + expect(warnings).toEqual([ + "Inferring subtype E of supertype C", + "Inferring subtype F of supertype C", + "Inferring subtype G of supertype C", + // Note that TooLong is not inferred here. + ]); + + expect(checkTypes({ + A: { + A: true, + B: true, + C: true, + D: true, + E: true, + F: true, + G: true, + H: false, + }, + B: { + A: false, + B: true, + C: false, + D: true, + E: false, + F: false, + G: false, + H: false, + }, + C: { + A: false, + B: false, + C: true, + D: false, + E: true, + F: true, + G: true, + H: false, + }, + D: { + A: false, + B: false, + C: false, + D: true, + E: false, + F: false, + G: false, + H: false, + }, + E: { + A: false, + B: false, + C: false, + D: false, + E: true, + F: false, + G: false, + H: false, + }, + F: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: true, + G: false, + H: false, + }, + G: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: true, + G: true, + H: false, + }, + }).size).toBe("ABCDEF".length); + + expect(cache.extract()).toMatchSnapshot(); + + // Now add the TooLong subtype of C explicitly. 
+ cache.policies.addPossibleTypes({ + C: ["TooLong"], + }); + + expect(checkTypes({ + A: { + A: true, + B: true, + C: true, + D: true, + E: true, + F: true, + G: true, + TooLong: true, + H: false, + }, + B: { + A: false, + B: true, + C: false, + D: true, + E: false, + F: false, + G: false, + TooLong: false, + H: false, + }, + C: { + A: false, + B: false, + C: true, + D: false, + E: true, + F: true, + G: true, + TooLong: true, + H: false, + }, + D: { + A: false, + B: false, + C: false, + D: true, + E: false, + F: false, + G: false, + TooLong: false, + H: false, + }, + E: { + A: false, + B: false, + C: false, + D: false, + E: true, + F: false, + G: false, + TooLong: false, + H: false, + }, + F: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: true, + G: false, + TooLong: false, + H: false, + }, + G: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: true, + G: true, + TooLong: false, + H: false, + }, + H: { + A: false, + B: false, + C: false, + D: false, + E: false, + F: false, + G: false, + TooLong: false, + H: true, + }, + }).size).toBe("ABCDEF".length); + }); }); diff --git a/src/cache/inmemory/__tests__/helpers.ts b/src/cache/inmemory/__tests__/helpers.ts index 98c428b89be..3a518ff5027 100644 --- a/src/cache/inmemory/__tests__/helpers.ts +++ b/src/cache/inmemory/__tests__/helpers.ts @@ -1,6 +1,11 @@ -import { NormalizedCache, NormalizedCacheObject } from "../types"; +import { + NormalizedCache, + NormalizedCacheObject, + DiffQueryAgainstStoreOptions, +} from "../types"; import { EntityStore } from "../entityStore"; import { InMemoryCache } from "../inMemoryCache"; +import { StoreReader } from "../readFromStore"; import { StoreWriter, WriteToStoreOptions } from "../writeToStore"; export function defaultNormalizedCacheFactory( @@ -20,6 +25,16 @@ extends Omit { store?: NormalizedCache; } +export function readQueryFromStore( + reader: StoreReader, + options: DiffQueryAgainstStoreOptions, +) { + return reader.diffQueryAgainstStore({ + 
...options, + returnPartialData: false, + }).result; +} + export function writeQueryToStore( options: WriteQueryToStoreOptions, ): NormalizedCache { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts index 1c75e637635..4c7aef0ab35 100644 --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -2,12 +2,13 @@ import gql from "graphql-tag"; import { InMemoryCache } from "../inMemoryCache"; import { ReactiveVar, makeVar } from "../reactiveVars"; -import { Reference, StoreObject, ApolloClient, NetworkStatus } from "../../../core"; +import { Reference, StoreObject, ApolloClient, NetworkStatus, TypedDocumentNode } from "../../../core"; import { MissingFieldError } from "../.."; import { relayStylePagination } from "../../../utilities"; import { MockLink } from '../../../utilities/testing/mocking/mockLink'; import subscribeAndCount from '../../../utilities/testing/subscribeAndCount'; import { itAsync } from '../../../utilities/testing/itAsync'; +import { FieldPolicy, StorageType } from "../policies"; function reverse(s: string) { return s.split("").reverse().join(""); @@ -736,8 +737,8 @@ describe("type policies", function () { }); }); - it("can use stable storage in read functions", function () { - const storageSet = new Set | null>(); + it("can use options.storage in read functions", function () { + const storageSet = new Set>(); const cache = new InMemoryCache({ typePolicies: { @@ -745,8 +746,8 @@ describe("type policies", function () { fields: { result(existing, { args, storage }) { storageSet.add(storage); - if (storage?.result) return storage.result; - return storage!.result = compute(); + if (storage.result) return storage.result; + return storage.result = compute(); }, }, }, @@ -840,9 +841,7 @@ describe("type policies", function () { // Clear the cached results. 
storageSet.forEach(storage => { - if (storage) { - delete storage.result; - } + delete storage.result; }); const result3 = cache.readQuery({ @@ -965,16 +964,16 @@ describe("type policies", function () { fields: { result: { read(_, { storage }) { - if (!storage!.jobName) { - storage!.jobName = makeVar(undefined); + if (!storage.jobName) { + storage.jobName = makeVar(undefined); } - return storage!.jobName(); + return storage.jobName(); }, merge(_, incoming: string, { storage }) { - if (storage!.jobName) { - storage!.jobName(incoming); + if (storage.jobName) { + storage.jobName(incoming); } else { - storage!.jobName = makeVar(incoming); + storage.jobName = makeVar(incoming); } }, }, @@ -1040,8 +1039,9 @@ describe("type policies", function () { return new MissingFieldError( `Can't find field 'result' on Job:{"name":"Job #${jobNumber}"} object`, ["jobs", jobNumber - 1, "result"], - expect.anything(), - expect.anything(), + expect.anything(), // query + false, // clientOnly + expect.anything(), // variables ); } @@ -1228,6 +1228,157 @@ describe("type policies", function () { }); }); + it("read, merge, and modify functions can access options.storage", function () { + const storageByFieldName = new Map(); + + function recordStorageOnce(fieldName: string, storage: StorageType) { + if (storageByFieldName.has(fieldName)) { + expect(storageByFieldName.get(fieldName)).toBe(storage); + } else { + storageByFieldName.set(fieldName, storage); + } + } + + function makeFieldPolicy(): FieldPolicy { + return { + read(existing = 0, { fieldName, storage }) { + storage.readCount = (storage.readCount|0) + 1; + recordStorageOnce(fieldName, storage); + return existing; + }, + merge(existing = 0, incoming, { fieldName, storage }) { + storage.mergeCount = (storage.mergeCount|0) + 1; + recordStorageOnce(fieldName, storage); + return existing + incoming; + }, + }; + }; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + mergeRead: makeFieldPolicy(), + mergeModify: 
makeFieldPolicy(), + mergeReadModify: makeFieldPolicy(), + }, + }, + }, + }); + + const query: TypedDocumentNode<{ + mergeRead: number; + mergeModify: number; + mergeReadModify: number; + }> = gql` + query { + mergeRead + mergeModify + mergeReadModify + } + `; + + cache.writeQuery({ + query, + data: { + mergeRead: 1, + mergeModify: 10, + mergeReadModify: 100, + }, + }); + + expect(storageByFieldName.get("mergeRead")).toEqual({ + mergeCount: 1, + }); + + expect(storageByFieldName.get("mergeModify")).toEqual({ + mergeCount: 1, + }); + + expect(storageByFieldName.get("mergeReadModify")).toEqual({ + mergeCount: 1, + }); + + expect(cache.readQuery({ + query: gql`query { mergeRead mergeReadModify }`, + })).toEqual({ + mergeRead: 1, + mergeReadModify: 100, + }); + + expect(storageByFieldName.get("mergeRead")).toEqual({ + mergeCount: 1, + readCount: 1, + }); + + expect(storageByFieldName.get("mergeModify")).toEqual({ + mergeCount: 1, + }); + + expect(storageByFieldName.get("mergeReadModify")).toEqual({ + mergeCount: 1, + readCount: 1, + }); + + expect(cache.modify({ + fields: { + mergeModify(value, { fieldName, storage }) { + storage.modifyCount = (storage.modifyCount|0) + 1; + recordStorageOnce(fieldName, storage); + return value + 1; + }, + mergeReadModify(value, { fieldName, storage }) { + storage.modifyCount = (storage.modifyCount|0) + 1; + recordStorageOnce(fieldName, storage); + return value + 1; + }, + }, + })).toBe(true); + + expect(cache.extract()).toMatchSnapshot(); + + expect(storageByFieldName.get("mergeRead")).toEqual({ + mergeCount: 1, + readCount: 1, + }); + + expect(storageByFieldName.get("mergeModify")).toEqual({ + mergeCount: 1, + modifyCount: 1, + }); + + expect(storageByFieldName.get("mergeReadModify")).toEqual({ + mergeCount: 1, + readCount: 1, + modifyCount: 1, + }); + + expect(cache.readQuery({ query })).toEqual({ + mergeRead: 1, + mergeModify: 11, + mergeReadModify: 101, + }); + + expect(storageByFieldName.get("mergeRead")).toEqual({ + mergeCount: 
1, + readCount: 2, + }); + + expect(storageByFieldName.get("mergeModify")).toEqual({ + mergeCount: 1, + modifyCount: 1, + readCount: 1, + }); + + expect(storageByFieldName.get("mergeReadModify")).toEqual({ + mergeCount: 1, + readCount: 2, + modifyCount: 1, + }); + + expect(cache.extract()).toMatchSnapshot(); + }); + it("merge functions can deduplicate items using readField", function () { const cache = new InMemoryCache({ typePolicies: { diff --git a/src/cache/inmemory/__tests__/readFromStore.ts b/src/cache/inmemory/__tests__/readFromStore.ts index ce0a2bfb09a..fc1f02a9f8b 100644 --- a/src/cache/inmemory/__tests__/readFromStore.ts +++ b/src/cache/inmemory/__tests__/readFromStore.ts @@ -6,7 +6,8 @@ import { StoreObject } from '../types'; import { StoreReader } from '../readFromStore'; import { makeReference, InMemoryCache, Reference, isReference } from '../../../core'; import { Cache } from '../../core/types/Cache'; -import { defaultNormalizedCacheFactory } from './helpers'; +import { MissingFieldError } from '../../core/types/common'; +import { defaultNormalizedCacheFactory, readQueryFromStore } from './helpers'; import { withError } from './diffAgainstStore'; describe('reading from the store', () => { @@ -32,7 +33,7 @@ describe('reading from the store', () => { } as StoreObject, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -74,7 +75,7 @@ describe('reading from the store', () => { it('rejects malformed queries', () => { expect(() => { - reader.readQueryFromStore({ + readQueryFromStore(reader, { store: defaultNormalizedCacheFactory(), query: gql` query { @@ -89,7 +90,7 @@ describe('reading from the store', () => { }).toThrowError(/2 operations/); expect(() => { - reader.readQueryFromStore({ + readQueryFromStore(reader, { store: defaultNormalizedCacheFactory(), query: gql` fragment x on y { @@ -112,7 +113,7 @@ describe('reading from the store', () => { ROOT_QUERY: result, }); - const 
queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` query { @@ -154,7 +155,7 @@ describe('reading from the store', () => { }, }); - const result = reader.readQueryFromStore({ + const result = readQueryFromStore(reader, { store, query, variables, @@ -187,7 +188,7 @@ describe('reading from the store', () => { }, }); - const result = reader.readQueryFromStore({ + const result = readQueryFromStore(reader, { store, query, }); @@ -227,7 +228,7 @@ describe('reading from the store', () => { }, }); - const result = reader.readQueryFromStore({ + const result = readQueryFromStore(reader, { store, query, variables, @@ -262,7 +263,7 @@ describe('reading from the store', () => { abcde: result.nestedObj, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -323,7 +324,7 @@ describe('reading from the store', () => { abcdef: result.deepNestedObj as StoreObject, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -400,7 +401,7 @@ describe('reading from the store', () => { ROOT_QUERY: result, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -451,7 +452,7 @@ describe('reading from the store', () => { ROOT_QUERY: result, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -503,7 +504,7 @@ describe('reading from the store', () => { abcde: result.nestedArray[1], }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -544,7 +545,7 @@ describe('reading from the store', () => { const store = defaultNormalizedCacheFactory({ ROOT_QUERY: result }); expect(() => { - reader.readQueryFromStore({ + readQueryFromStore(reader, { store, query: gql` { @@ -556,6 
+557,74 @@ describe('reading from the store', () => { }).toThrowError(/Can't find field 'missingField' on ROOT_QUERY object/); }); + it('distinguishes between missing @client and non-@client fields', () => { + const query = gql` + query { + normal { + present @client + missing + } + clientOnly @client { + present + missing + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + normal() { + return { present: "here" }; + }, + clientOnly() { + return { present: "also here" }; + }, + }, + }, + }, + }); + + const { result, complete, missing } = cache.diff({ + query, + optimistic: true, + returnPartialData: true, + }); + + expect(complete).toBe(false); + + expect(result).toEqual({ + normal: { + present: "here", + }, + clientOnly: { + present: "also here", + }, + }); + + expect(missing).toEqual([ + new MissingFieldError( + `Can't find field 'missing' on object { + "present": "here" +}`, + ["normal", "missing"], + query, + false, // clientOnly + {}, // variables + ), + new MissingFieldError( + `Can't find field 'missing' on object { + "present": "also here" +}`, + ["clientOnly", "missing"], + query, + true, // clientOnly + {}, // variables + ), + ]); + }); + it('runs a nested query where the reference is null', () => { const result: any = { id: 'abcd', @@ -571,7 +640,7 @@ describe('reading from the store', () => { }) as StoreObject, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -606,7 +675,7 @@ describe('reading from the store', () => { ROOT_QUERY: result, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -638,7 +707,7 @@ describe('reading from the store', () => { ROOT_QUERY: result, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -693,7 +762,7 @@ describe('reading from the store', () => { 
abcdef: data.deepNestedObj as StoreObject, }); - const queryResult1 = reader.readQueryFromStore({ + const queryResult1 = readQueryFromStore(reader, { store, rootId: 'abcde', query: gql` @@ -721,7 +790,7 @@ describe('reading from the store', () => { }, }); - const queryResult2 = reader.readQueryFromStore({ + const queryResult2 = readQueryFromStore(reader, { store, rootId: 'abcdef', query: gql` @@ -751,7 +820,7 @@ describe('reading from the store', () => { }, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -799,7 +868,7 @@ describe('reading from the store', () => { }, }); - const queryResult = reader.readQueryFromStore({ + const queryResult = readQueryFromStore(reader, { store, query: gql` { @@ -854,7 +923,7 @@ describe('reading from the store', () => { }); expect(() => { - reader.readQueryFromStore({ + readQueryFromStore(reader, { store, query: gql` { @@ -870,7 +939,7 @@ describe('reading from the store', () => { ); expect( - reader.readQueryFromStore({ + readQueryFromStore(reader, { store, query: gql` { diff --git a/src/cache/inmemory/__tests__/roundtrip.ts b/src/cache/inmemory/__tests__/roundtrip.ts index bce2ff62c45..a83590f2e9f 100644 --- a/src/cache/inmemory/__tests__/roundtrip.ts +++ b/src/cache/inmemory/__tests__/roundtrip.ts @@ -6,7 +6,7 @@ import { EntityStore } from '../entityStore'; import { StoreReader } from '../readFromStore'; import { StoreWriter } from '../writeToStore'; import { InMemoryCache } from '../inMemoryCache'; -import { writeQueryToStore } from './helpers'; +import { writeQueryToStore, readQueryFromStore } from './helpers'; function assertDeeplyFrozen(value: any, stack: any[] = []) { if (value !== null && typeof value === 'object' && stack.indexOf(value) < 0) { @@ -43,17 +43,17 @@ function storeRoundtrip(query: DocumentNode, result: any, variables = {}) { variables, }; - const reconstructedResult = reader.readQueryFromStore(readOptions); + const 
reconstructedResult = readQueryFromStore(reader, readOptions); expect(reconstructedResult).toEqual(result); // Make sure the result is identical if we haven't written anything new // to the store. https://github.com/apollographql/apollo-client/pull/3394 expect(store).toBeInstanceOf(EntityStore); - expect(reader.readQueryFromStore(readOptions)).toBe(reconstructedResult); + expect(readQueryFromStore(reader, readOptions)).toBe(reconstructedResult); - const immutableResult = reader.readQueryFromStore(readOptions); + const immutableResult = readQueryFromStore(reader, readOptions); expect(immutableResult).toEqual(reconstructedResult); - expect(reader.readQueryFromStore(readOptions)).toBe(immutableResult); + expect(readQueryFromStore(reader, readOptions)).toBe(immutableResult); if (process.env.NODE_ENV !== 'production') { try { // Note: this illegal assignment will only throw in strict mode, but that's @@ -80,7 +80,7 @@ function storeRoundtrip(query: DocumentNode, result: any, variables = {}) { `, }); - const deletedRootResult = reader.readQueryFromStore(readOptions); + const deletedRootResult = readQueryFromStore(reader, readOptions); expect(deletedRootResult).toEqual(result); if (deletedRootResult === reconstructedResult) { diff --git a/src/cache/inmemory/__tests__/writeToStore.ts b/src/cache/inmemory/__tests__/writeToStore.ts index 51ca52dcc32..668f299e29e 100644 --- a/src/cache/inmemory/__tests__/writeToStore.ts +++ b/src/cache/inmemory/__tests__/writeToStore.ts @@ -2326,6 +2326,19 @@ describe('writing to the store', () => { }); }); + it("should warn if it cannot identify the result object", () => { + const cache = new InMemoryCache; + + expect(() => { + cache.writeFragment({ + fragment: gql`fragment Count on Counter { count }`, + data: { + count: 1, + }, + }); + }).toThrowError(/Could not identify object/); + }); + it('user objects should be able to have { __typename: "Subscription" }', () => { const cache = new InMemoryCache({ typePolicies: { diff --git 
a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index 6f18eddada9..c2c135f08da 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -13,13 +13,12 @@ import { } from '../../utilities'; import { NormalizedCache, NormalizedCacheObject } from './types'; import { hasOwn, fieldNameFromStoreName } from './helpers'; -import { Policies } from './policies'; +import { Policies, StorageType } from './policies'; import { Cache } from '../core/types/Cache'; import { SafeReadonly, Modifier, Modifiers, - ReadFieldFunction, ReadFieldOptions, ToReferenceFunction, CanReadFunction, @@ -27,6 +26,7 @@ import { const DELETE: any = Object.create(null); const delModifier: Modifier = () => DELETE; +const INVALIDATE: any = Object.create(null); export abstract class EntityStore implements NormalizedCache { protected data: NormalizedCacheObject = Object.create(null); @@ -127,16 +127,23 @@ export abstract class EntityStore implements NormalizedCache { let needToMerge = false; let allDeleted = true; - const readField: ReadFieldFunction = ( - fieldNameOrOptions: string | ReadFieldOptions, - from?: StoreObject | Reference, - ) => this.policies.readField( - typeof fieldNameOrOptions === "string" ? { - fieldName: fieldNameOrOptions, - from: from || makeReference(dataId), - } : fieldNameOrOptions, - { store: this }, - ); + const sharedDetails = { + DELETE, + INVALIDATE, + isReference, + toReference: this.toReference, + canRead: this.canRead, + readField: ( + fieldNameOrOptions: string | ReadFieldOptions, + from?: StoreObject | Reference, + ) => this.policies.readField( + typeof fieldNameOrOptions === "string" ? 
{ + fieldName: fieldNameOrOptions, + from: from || makeReference(dataId), + } : fieldNameOrOptions, + { store: this }, + ), + }; Object.keys(storeObject).forEach(storeFieldName => { const fieldName = fieldNameFromStoreName(storeFieldName); @@ -148,19 +155,20 @@ export abstract class EntityStore implements NormalizedCache { if (modify) { let newValue = modify === delModifier ? DELETE : modify(maybeDeepFreeze(fieldValue), { - DELETE, + ...sharedDetails, fieldName, storeFieldName, - isReference, - toReference: this.toReference, - canRead: this.canRead, - readField, + storage: this.getStorage(dataId, storeFieldName), }); - if (newValue === DELETE) newValue = void 0; - if (newValue !== fieldValue) { - changedFields[storeFieldName] = newValue; - needToMerge = true; - fieldValue = newValue; + if (newValue === INVALIDATE) { + this.group.dirty(dataId, storeFieldName); + } else { + if (newValue === DELETE) newValue = void 0; + if (newValue !== fieldValue) { + changedFields[storeFieldName] = newValue; + needToMerge = true; + fieldValue = newValue; + } } } if (fieldValue !== void 0) { @@ -248,6 +256,11 @@ export abstract class EntityStore implements NormalizedCache { } } + public abstract getStorage( + idOrObj: string | StoreObject, + storeFieldName: string, + ): StorageType; + // Maps root entity IDs to the number of times they have been retained, minus // the number of times they have been released. Retained entities keep other // entities they reference (even indirectly) from being garbage collected. @@ -467,6 +480,14 @@ export namespace EntityStore { // Never remove the root layer. 
return this; } + + public readonly storageTrie = new KeyTrie(canUseWeakMap); + public getStorage( + idOrObj: string | StoreObject, + storeFieldName: string, + ): StorageType { + return this.storageTrie.lookup(idOrObj, storeFieldName); + } } } @@ -531,6 +552,13 @@ class Layer extends EntityStore { ...super.findChildRefIds(dataId), } : fromParent; } + + public getStorage( + idOrObj: string | StoreObject, + storeFieldName: string, + ): StorageType { + return this.parent.getStorage(idOrObj, storeFieldName); + } } function storeObjectReconciler( diff --git a/src/cache/inmemory/helpers.ts b/src/cache/inmemory/helpers.ts index 9cdd09ae12c..47d9ee28e5c 100644 --- a/src/cache/inmemory/helpers.ts +++ b/src/cache/inmemory/helpers.ts @@ -1,4 +1,4 @@ -import { FieldNode } from 'graphql'; +import { FieldNode, SelectionSetNode } from 'graphql'; import { NormalizedCache } from './types'; import { @@ -9,6 +9,8 @@ import { isField, DeepMerger, ReconcilerFunction, + resultKeyNameFromField, + shouldInclude, } from '../../utilities'; export const hasOwn = Object.prototype.hasOwnProperty; @@ -22,12 +24,39 @@ export function getTypenameFromStoreObject( : objectOrReference && objectOrReference.__typename; } -const FieldNamePattern = /^[_A-Za-z0-9]+/; +export const TypeOrFieldNameRegExp = /^[_a-z][_0-9a-z]*/i; + export function fieldNameFromStoreName(storeFieldName: string): string { - const match = storeFieldName.match(FieldNamePattern); + const match = storeFieldName.match(TypeOrFieldNameRegExp); return match ? match[0] : storeFieldName; } +export function selectionSetMatchesResult( + selectionSet: SelectionSetNode, + result: Record, + variables?: Record, +): boolean { + if (result && typeof result === "object") { + return Array.isArray(result) + ? 
result.every(item => selectionSetMatchesResult(selectionSet, item, variables)) + : selectionSet.selections.every(field => { + if (isField(field) && shouldInclude(field, variables)) { + const key = resultKeyNameFromField(field); + return hasOwn.call(result, key) && + (!field.selectionSet || + selectionSetMatchesResult(field.selectionSet, result[key], variables)); + } + // If the selection has been skipped with @skip(true) or + // @include(false), it should not count against the matching. If + // the selection is not a field, it must be a fragment (inline or + // named). We will determine if selectionSetMatchesResult for that + // fragment when we get to it, so for now we return true. + return true; + }); + } + return false; +} + // Invoking merge functions needs to happen after processSelectionSet has // finished, but requires information that is more readily available // during processSelectionSet, so processSelectionSet embeds special diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts index 1260e83e1d2..aa988941e1d 100644 --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -110,13 +110,14 @@ export class InMemoryCache extends ApolloCache { if (typeof options.rootId === 'string' && !store.has(options.rootId)) { return null; } - return this.storeReader.readQueryFromStore({ + return this.storeReader.diffQueryAgainstStore({ store, query: options.query, variables: options.variables, rootId: options.rootId, config: this.config, - }) || null; + returnPartialData: false, + }).result || null; } public write(options: Cache.WriteOptions): Reference | undefined { @@ -278,11 +279,14 @@ export class InMemoryCache extends ApolloCache { } }; + let fromOptimisticTransaction = false; + if (typeof optimisticId === 'string') { // Note that there can be multiple layers with the same optimisticId. 
// When removeOptimistic(id) is called for that id, all matching layers // will be removed, and the remaining layers will be reapplied. this.optimisticData = this.optimisticData.addLayer(optimisticId, perform); + fromOptimisticTransaction = true; } else if (optimisticId === null) { // Ensure both this.data and this.optimisticData refer to the root // (non-optimistic) layer of the cache during the transaction. Note @@ -297,7 +301,7 @@ export class InMemoryCache extends ApolloCache { } // This broadcast does nothing if this.txCount > 0. - this.broadcastWatches(); + this.broadcastWatches(fromOptimisticTransaction); } public transformDocument(document: DocumentNode): DocumentNode { @@ -316,14 +320,17 @@ export class InMemoryCache extends ApolloCache { return document; } - protected broadcastWatches() { + protected broadcastWatches(fromOptimisticTransaction?: boolean) { if (!this.txCount) { - this.watches.forEach(c => this.maybeBroadcastWatch(c)); + this.watches.forEach(c => this.maybeBroadcastWatch(c, fromOptimisticTransaction)); } } - private maybeBroadcastWatch = wrap((c: Cache.WatchOptions) => { - return this.broadcastWatch.call(this, c); + private maybeBroadcastWatch = wrap(( + c: Cache.WatchOptions, + fromOptimisticTransaction?: boolean, + ) => { + return this.broadcastWatch.call(this, c, !!fromOptimisticTransaction); }, { makeCacheKey: (c: Cache.WatchOptions) => { // Return a cache key (thus enabling result caching) only if we're @@ -354,7 +361,10 @@ export class InMemoryCache extends ApolloCache { // simpler to check for changes after recomputing a result but before // broadcasting it, but this wrapping approach allows us to skip both // the recomputation and the broadcast, in most cases. 
- private broadcastWatch(c: Cache.WatchOptions) { + private broadcastWatch( + c: Cache.WatchOptions, + fromOptimisticTransaction: boolean, + ) { // First, invalidate any other maybeBroadcastWatch wrapper functions // currently depending on this Cache.WatchOptions object (including // the one currently calling broadcastWatch), so they will be included @@ -372,10 +382,16 @@ export class InMemoryCache extends ApolloCache { // changed since they were previously delivered. this.watchDep(c); - c.callback(this.diff({ + const diff = this.diff({ query: c.query, variables: c.variables, optimistic: c.optimistic, - })); + }); + + if (c.optimistic && fromOptimisticTransaction) { + diff.fromOptimisticTransaction = true; + } + + c.callback(diff); } } diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts index deb642f55c0..d36974b0845 100644 --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -29,6 +29,8 @@ import { FieldValueToBeMerged, isFieldValueToBeMerged, storeValueIsStoreObject, + selectionSetMatchesResult, + TypeOrFieldNameRegExp, } from './helpers'; import { cacheSlot } from './reactiveVars'; import { InMemoryCache } from './inMemoryCache'; @@ -102,7 +104,7 @@ export type FieldPolicy< merge?: FieldMergeFunction | boolean; }; -type StorageType = Record; +export type StorageType = Record; function argsFromFieldSpecifier(spec: FieldSpecifier) { return spec.args !== void 0 ? spec.args : @@ -138,7 +140,7 @@ export interface FieldFunctionOptions< // A handy place to put field-specific data that you want to survive // across multiple read function calls. Useful for field-level caching, // if your read function does any expensive work. 
- storage: StorageType | null; + storage: StorageType; cache: InMemoryCache; @@ -227,7 +229,6 @@ export class Policies { private typePolicies: { [__typename: string]: { keyFn?: KeyFieldsFunction; - subtypes?: Set; fields?: { [fieldName: string]: { keyFn?: KeyArgsFunction; @@ -238,6 +239,18 @@ export class Policies { }; } = Object.create(null); + // Map from subtype names to sets of supertype names. Note that this + // representation inverts the structure of possibleTypes (whose keys are + // supertypes and whose values are arrays of subtypes) because it tends + // to be much more efficient to search upwards than downwards. + private supertypeMap = new Map>(); + + // Any fuzzy subtypes specified by possibleTypes will be converted to + // RegExp objects and recorded here. Every key of this map can also be + // found in supertypeMap. In many cases this Map will be empty, which + // means no fuzzy subtype checking will happen in fragmentMatches. + private fuzzySubtypes = new Map(); + public readonly cache: InMemoryCache; public readonly rootIdsByTypename: Record = Object.create(null); @@ -407,8 +420,19 @@ export class Policies { public addPossibleTypes(possibleTypes: PossibleTypesMap) { (this.usingPossibleTypes as boolean) = true; Object.keys(possibleTypes).forEach(supertype => { - const subtypeSet = this.getSubtypeSet(supertype, true); - possibleTypes[supertype].forEach(subtypeSet!.add, subtypeSet); + // Make sure all types have an entry in this.supertypeMap, even if + // their supertype set is empty, so we can return false immediately + // from policies.fragmentMatches for unknown supertypes. + this.getSupertypeSet(supertype, true); + + possibleTypes[supertype].forEach(subtype => { + this.getSupertypeSet(subtype, true)!.add(supertype); + const match = subtype.match(TypeOrFieldNameRegExp); + if (!match || match[0] !== subtype) { + // TODO Don't interpret just any invalid typename as a RegExp. 
+ this.fuzzySubtypes.set(subtype, new RegExp(subtype)); + } + }); }); } @@ -422,17 +446,6 @@ export class Policies { } } - private getSubtypeSet( - supertype: string, - createIfMissing: boolean, - ): Set | undefined { - const policy = this.getTypePolicy(supertype, createIfMissing); - if (policy) { - return policy.subtypes || ( - createIfMissing ? policy.subtypes = new Set() : void 0); - } - } - private getFieldPolicy( typename: string | undefined, fieldName: string, @@ -453,9 +466,22 @@ export class Policies { } } + private getSupertypeSet( + subtype: string, + createIfMissing: boolean, + ): Set | undefined { + let supertypeSet = this.supertypeMap.get(subtype); + if (!supertypeSet && createIfMissing) { + this.supertypeMap.set(subtype, supertypeSet = new Set()); + } + return supertypeSet; + } + public fragmentMatches( fragment: InlineFragmentNode | FragmentDefinitionNode, typename: string | undefined, + result?: Record, + variables?: Record, ): boolean { if (!fragment.typeCondition) return true; @@ -464,20 +490,75 @@ export class Policies { if (!typename) return false; const supertype = fragment.typeCondition.name.value; + // Common case: fragment type condition and __typename are the same. 
if (typename === supertype) return true; - if (this.usingPossibleTypes) { - const workQueue = [this.getSubtypeSet(supertype, false)]; + if (this.usingPossibleTypes && + this.supertypeMap.has(supertype)) { + const typenameSupertypeSet = this.getSupertypeSet(typename, true)!; + const workQueue = [typenameSupertypeSet]; + const maybeEnqueue = (subtype: string) => { + const supertypeSet = this.getSupertypeSet(subtype, false); + if (supertypeSet && + supertypeSet.size && + workQueue.indexOf(supertypeSet) < 0) { + workQueue.push(supertypeSet); + } + }; + + // We need to check fuzzy subtypes only if we encountered fuzzy + // subtype strings in addPossibleTypes, and only while writing to + // the cache, since that's when selectionSetMatchesResult gives a + // strong signal of fragment matching. The StoreReader class calls + // policies.fragmentMatches without passing a result object, so + // needToCheckFuzzySubtypes is always false while reading. + let needToCheckFuzzySubtypes = !!(result && this.fuzzySubtypes.size); + let checkingFuzzySubtypes = false; + // It's important to keep evaluating workQueue.length each time through // the loop, because the queue can grow while we're iterating over it. for (let i = 0; i < workQueue.length; ++i) { - const subtypes = workQueue[i]; - if (subtypes) { - if (subtypes.has(typename)) return true; - subtypes.forEach(subtype => { - const subsubtypes = this.getSubtypeSet(subtype, false); - if (subsubtypes && workQueue.indexOf(subsubtypes) < 0) { - workQueue.push(subsubtypes); + const supertypeSet = workQueue[i]; + + if (supertypeSet.has(supertype)) { + if (!typenameSupertypeSet.has(supertype)) { + if (checkingFuzzySubtypes) { + invariant.warn(`Inferring subtype ${typename} of supertype ${supertype}`); + } + // Record positive results for faster future lookup. + // Unfortunately, we cannot safely cache negative results, + // because new possibleTypes data could always be added to the + // Policies class. 
+ typenameSupertypeSet.add(supertype); + } + return true; + } + + supertypeSet.forEach(maybeEnqueue); + + if (needToCheckFuzzySubtypes && + // Start checking fuzzy subtypes only after exhausting all + // non-fuzzy subtypes (after the final iteration of the loop). + i === workQueue.length - 1 && + // We could wait to compare fragment.selectionSet to result + // after we verify the supertype, but this check is often less + // expensive than that search, and we will have to do the + // comparison anyway whenever we find a potential match. + selectionSetMatchesResult(fragment.selectionSet, result!, variables)) { + // We don't always need to check fuzzy subtypes (if no result + // was provided, or !this.fuzzySubtypes.size), but, when we do, + // we only want to check them once. + needToCheckFuzzySubtypes = false; + checkingFuzzySubtypes = true; + + // If we find any fuzzy subtypes that match typename, extend the + // workQueue to search through the supertypes of those fuzzy + // subtypes. Otherwise the for-loop will terminate and we'll + // return false below. + this.fuzzySubtypes.forEach((regExp, fuzzyString) => { + const match = typename.match(regExp); + if (match && match[0] === typename) { + maybeEnqueue(fuzzyString); } }); } @@ -528,8 +609,6 @@ export class Policies { : fieldName + ":" + storeFieldName; } - private storageTrie = new KeyTrie(true); - public readField( options: ReadFieldOptions, context: ReadMergeModifyContext, @@ -557,7 +636,7 @@ export class Policies { objectOrReference, options, context, - this.storageTrie.lookup( + context.store.getStorage( isReference(objectOrReference) ? 
objectOrReference.__ref : objectOrReference, @@ -598,17 +677,6 @@ export class Policies { const { merge } = this.getFieldPolicy( incoming.__typename, fieldName, false)!; - // If storage ends up null, that just means no options.storage object - // has ever been created for a read function for this field before, so - // there's nothing this merge function could do with options.storage - // that would help the read function do its work. Most merge functions - // will never need to worry about options.storage, but if you're reading - // this comment then you probably have good reasons for wanting to know - // esoteric details like these, you wizard, you. - const storage = storageKeys - ? this.storageTrie.lookupArray(storageKeys) - : null; - incoming = merge!(existing, incoming.__value, makeFieldFunctionOptions( this, // Unlike options.readField for read functions, we do not fall @@ -628,7 +696,9 @@ export class Policies { field, variables: context.variables }, context, - storage, + storageKeys + ? 
context.store.getStorage(...storageKeys) + : Object.create(null), )) as T; } @@ -695,7 +765,7 @@ function makeFieldFunctionOptions( objectOrReference: StoreObject | Reference | undefined, fieldSpec: FieldSpecifier, context: ReadMergeModifyContext, - storage: StorageType | null, + storage: StorageType, ): FieldFunctionOptions { const storeFieldName = policies.getStoreFieldName(fieldSpec); const fieldName = fieldNameFromStoreName(storeFieldName); diff --git a/src/cache/inmemory/reactiveVars.ts b/src/cache/inmemory/reactiveVars.ts index c702104aa59..f063bb03546 100644 --- a/src/cache/inmemory/reactiveVars.ts +++ b/src/cache/inmemory/reactiveVars.ts @@ -3,7 +3,12 @@ import { dep } from "optimism"; import { InMemoryCache } from "./inMemoryCache"; import { ApolloCache } from '../../core'; -export type ReactiveVar = (newValue?: T) => T; +export interface ReactiveVar { + (newValue?: T): T; + onNextChange(listener: ReactiveListener): () => void; +} + +export type ReactiveListener = (value: T) => any; const varDep = dep>(); @@ -11,17 +16,34 @@ const varDep = dep>(); // called in Policies#readField. export const cacheSlot = new Slot>(); +// A listener function could in theory cause another listener to be added +// to the set while we're iterating over it, so it's important to commit +// to the original elements of the set before we begin iterating. See +// iterateObserversSafely for another example of this pattern. 
+function consumeAndIterate(set: Set, callback: (item: T) => any) { + const items: T[] = []; + set.forEach(item => items.push(item)); + set.clear(); + items.forEach(callback); +} + export function makeVar(value: T): ReactiveVar { const caches = new Set>(); + const listeners = new Set>(); - return function rv(newValue) { + const rv: ReactiveVar = function (newValue) { if (arguments.length > 0) { if (value !== newValue) { value = newValue!; + // First, invalidate any fields with custom read functions that + // consumed this variable, so query results involving those fields + // will be recomputed the next time we read them. varDep.dirty(rv); - // Trigger broadcast for any caches that were previously involved - // in reading this variable. + // Next, broadcast changes to any caches that have previously read + // from this variable. caches.forEach(broadcast); + // Finally, notify any listeners added via rv.onNextChange. + consumeAndIterate(listeners, listener => listener(value)); } } else { // When reading from the variable, obtain the current cache from @@ -34,12 +56,21 @@ export function makeVar(value: T): ReactiveVar { return value; }; + + rv.onNextChange = listener => { + listeners.add(listener); + return () => { + listeners.delete(listener); + }; + }; + + return rv; } type Broadcastable = ApolloCache & { // This method is protected in InMemoryCache, which we are ignoring, but // we still want some semblance of type safety when we call it. 
- broadcastWatches: InMemoryCache["broadcastWatches"]; + broadcastWatches?: InMemoryCache["broadcastWatches"]; }; function broadcast(cache: Broadcastable) { diff --git a/src/cache/inmemory/readFromStore.ts b/src/cache/inmemory/readFromStore.ts index d0b343db628..1abafb40f30 100644 --- a/src/cache/inmemory/readFromStore.ts +++ b/src/cache/inmemory/readFromStore.ts @@ -1,8 +1,6 @@ import { DocumentNode, FieldNode, - FragmentDefinitionNode, - InlineFragmentNode, SelectionSetNode, } from 'graphql'; import { wrap, OptimisticWrapperFunction } from 'optimism'; @@ -10,7 +8,6 @@ import { invariant, InvariantError } from 'ts-invariant'; import { isField, - isInlineFragment, resultKeyNameFromField, Reference, isReference, @@ -26,11 +23,11 @@ import { getQueryDefinition, maybeDeepFreeze, mergeDeepArray, + getFragmentFromSelection, } from '../../utilities'; import { Cache } from '../core/types/Cache'; import { DiffQueryAgainstStoreOptions, - ReadQueryOptions, NormalizedCache, ReadMergeModifyContext, } from './types'; @@ -47,6 +44,7 @@ interface ReadContext extends ReadMergeModifyContext { policies: Policies; fragmentMap: FragmentMap; path: (string | number)[]; + clientOnly: boolean; }; export type ExecResult = { @@ -62,6 +60,7 @@ function missingFromInvariant( err.message, context.path.slice(), context.query, + context.clientOnly, context.variables, ); } @@ -88,26 +87,6 @@ export class StoreReader { this.config = { addTypename: true, ...config }; } - /** - * Resolves the result of a query solely from the store (i.e. never hits the server). - * - * @param {Store} store The {@link NormalizedCache} used by Apollo for the `data` portion of the - * store. - * - * @param {DocumentNode} query The query document to resolve from the data available in the store. - * - * @param {Object} [variables] A map from the name of a variable to its value. These variables can - * be referenced by the query document. 
- */ - public readQueryFromStore( - options: ReadQueryOptions, - ): QueryType | undefined { - return this.diffQueryAgainstStore({ - ...options, - returnPartialData: false, - }).result; - } - /** * Given a store and a query, return as much of the result as possible and * identify if any data was missing from the store. @@ -140,6 +119,7 @@ export class StoreReader { varString: JSON.stringify(variables), fragmentMap: createFragmentMap(getFragmentDefinitions(query)), path: [], + clientOnly: false, }, }); @@ -219,7 +199,7 @@ export class StoreReader { }; } - const { fragmentMap, variables, policies, store } = context; + const { variables, policies, store } = context; const objectsToMerge: { [key: string]: any }[] = []; const finalResult: ExecResult = { result: null }; const typename = store.getFieldValue(objectOrReference, "__typename"); @@ -260,6 +240,20 @@ export class StoreReader { const resultName = resultKeyNameFromField(selection); context.path.push(resultName); + // If this field has an @client directive, then the field and + // everything beneath it is client-only, meaning it will never be + // sent to the server. + const wasClientOnly = context.clientOnly; + // Once we enter a client-only subtree of the query, we can avoid + // repeatedly checking selection.directives. + context.clientOnly = wasClientOnly || !!( + // We don't use the hasDirectives helper here, because it looks + // for directives anywhere inside the AST node, whereas we only + // care about directives directly attached to this field. 
+ selection.directives && + selection.directives.some(d => d.name.value === "client") + ); + if (fieldValue === void 0) { if (!addTypenameToDocument.added(selection)) { getMissing().push( @@ -312,22 +306,17 @@ export class StoreReader { objectsToMerge.push({ [resultName]: fieldValue }); } + context.clientOnly = wasClientOnly; + invariant(context.path.pop() === resultName); } else { - let fragment: InlineFragmentNode | FragmentDefinitionNode; - - if (isInlineFragment(selection)) { - fragment = selection; - } else { - // This is a named fragment - invariant( - fragment = fragmentMap[selection.name.value], - `No fragment named ${selection.name.value}`, - ); - } + const fragment = getFragmentFromSelection( + selection, + context.fragmentMap, + ); - if (policies.fragmentMatches(fragment, typename)) { + if (fragment && policies.fragmentMatches(fragment, typename)) { fragment.selectionSet.selections.forEach(workSet.add, workSet); } } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index 9e83bf65932..bed9b7adb60 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -7,7 +7,7 @@ import { Reference, } from '../../utilities'; import { FieldValueGetter } from './entityStore'; -import { KeyFieldsFunction } from './policies'; +import { KeyFieldsFunction, StorageType } from './policies'; import { Modifier, Modifiers, @@ -62,6 +62,11 @@ export interface NormalizedCache { getFieldValue: FieldValueGetter; toReference: ToReferenceFunction; canRead: CanReadFunction; + + getStorage( + idOrObj: string | StoreObject, + storeFieldName: string, + ): StorageType; } /** diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts index fea1d64bf5c..1d05ba8e3f2 100644 --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -89,7 +89,7 @@ export class StoreWriter { ...variables, }; - const objOrRef = this.processSelectionSet({ + const ref = this.processSelectionSet({ result: result || 
Object.create(null), dataId, selectionSet: operationDefinition.selectionSet, @@ -105,15 +105,14 @@ export class StoreWriter { }, }); - const ref = isReference(objOrRef) ? objOrRef : - dataId && makeReference(dataId) || void 0; - - if (ref) { - // Any IDs written explicitly to the cache (including ROOT_QUERY, - // most frequently) will be retained as reachable root IDs. - store.retain(ref.__ref); + if (!isReference(ref)) { + throw new InvariantError(`Could not identify object ${JSON.stringify(result)}`); } + // Any IDs written explicitly to the cache (including ROOT_QUERY, + // most frequently) will be retained as reachable root IDs. + store.retain(ref.__ref); + return ref; } @@ -246,7 +245,26 @@ export class StoreWriter { context.fragmentMap, ); - if (fragment && policies.fragmentMatches(fragment, typename)) { + if (fragment && + // By passing result and context.variables, we enable + // policies.fragmentMatches to bend the rules when typename is + // not a known subtype of the fragment type condition, but the + // result object contains all the keys requested by the + // fragment, which strongly suggests the fragment probably + // matched. This fuzzy matching behavior must be enabled by + // including a regular expression string (such as ".*" or + // "Prefix.*" or ".*Suffix") in the possibleTypes array for + // specific supertypes; otherwise, all matching remains exact. + // Fuzzy matches are remembered by the Policies object and + // later used when reading from the cache. Since there is no + // incoming result object to check when reading, reading does + // not involve the same fuzzy inference, so the StoreReader + // class calls policies.fragmentMatches without passing result + // or context.variables. The flexibility of fuzzy matching + // allows existing clients to accommodate previously unknown + // __typename strings produced by server/schema changes, which + // would otherwise be breaking changes. 
+ policies.fragmentMatches(fragment, typename, result, context.variables)) { fragment.selectionSet.selections.forEach(workSet.add, workSet); } } diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts index 6fd3ca69062..b4d58404ad7 100644 --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -277,7 +277,7 @@ export class ApolloClient implements DataProxy { * a description of store reactivity. */ public watchQuery( - options: WatchQueryOptions, + options: WatchQueryOptions, ): ObservableQuery { if (this.defaultOptions.watchQuery) { options = compact(this.defaultOptions.watchQuery, options); @@ -305,7 +305,7 @@ export class ApolloClient implements DataProxy { * server at all or just resolve from the cache, etc. */ public query( - options: QueryOptions, + options: QueryOptions, ): Promise> { if (this.defaultOptions.query) { options = compact(this.defaultOptions.query, options); @@ -347,7 +347,7 @@ export class ApolloClient implements DataProxy { * {@link Observable} which either emits received data or an error. */ public subscribe( - options: SubscriptionOptions, + options: SubscriptionOptions, ): Observable> { return this.queryManager.startGraphQLSubscription(options); } @@ -362,7 +362,7 @@ export class ApolloClient implements DataProxy { * optimistic results. Is `false` by default. */ public readQuery( - options: DataProxy.Query, + options: DataProxy.Query, optimistic: boolean = false, ): T | null { return this.cache.readQuery(options, optimistic); @@ -383,7 +383,7 @@ export class ApolloClient implements DataProxy { * optimistic results. Is `false` by default. 
*/ public readFragment( - options: DataProxy.Fragment, + options: DataProxy.Fragment, optimistic: boolean = false, ): T | null { return this.cache.readFragment(options, optimistic); diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts index 907a7d588a2..d4b31c4083f 100644 --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -1,4 +1,4 @@ -import { invariant, InvariantError } from 'ts-invariant'; +import { invariant } from 'ts-invariant'; import { equal } from '@wry/equality'; import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus'; @@ -45,7 +45,7 @@ export class ObservableQuery< TData = any, TVariables = OperationVariables > extends Observable> { - public readonly options: WatchQueryOptions; + public readonly options: WatchQueryOptions; public readonly queryId: string; public readonly queryName?: string; @@ -72,7 +72,7 @@ export class ObservableQuery< }: { queryManager: QueryManager; queryInfo: QueryInfo; - options: WatchQueryOptions; + options: WatchQueryOptions; }) { super((observer: Observer>) => this.onSubscribe(observer), @@ -125,7 +125,7 @@ export class ObservableQuery< }); } - public getCurrentResult(): ApolloQueryResult { + public getCurrentResult(saveAsLastResult = true): ApolloQueryResult { const { lastResult } = this; const networkStatus = @@ -146,7 +146,9 @@ export class ObservableQuery< const { fetchPolicy = 'cache-first' } = this.options; if (fetchPolicy === 'no-cache' || fetchPolicy === 'network-only') { - result.partial = false; + // Similar to setting result.partial to false, but taking advantage + // of the falsiness of missing fields. 
+ delete result.partial; } else if ( !result.data || // If this.options.query has @client(always: true) fields, we cannot @@ -159,24 +161,29 @@ export class ObservableQuery< !this.queryManager.transform(this.options.query).hasForcedResolvers ) { const diff = this.queryInfo.getDiff(); - result.partial = !diff.complete; result.data = ( diff.complete || this.options.returnPartialData ) ? diff.result : void 0; - // If the cache diff is complete, and we're using a FetchPolicy that - // terminates after a complete cache read, we can assume the next - // result we receive will have NetworkStatus.ready and !loading. - if (diff.complete && - result.networkStatus === NetworkStatus.loading && - (fetchPolicy === 'cache-first' || - fetchPolicy === 'cache-only')) { - result.networkStatus = NetworkStatus.ready; - result.loading = false; + if (diff.complete) { + // If the diff is complete, and we're using a FetchPolicy that + // terminates after a complete cache read, we can assume the next + // result we receive will have NetworkStatus.ready and !loading. + if (result.networkStatus === NetworkStatus.loading && + (fetchPolicy === 'cache-first' || + fetchPolicy === 'cache-only')) { + result.networkStatus = NetworkStatus.ready; + result.loading = false; + } + delete result.partial; + } else { + result.partial = true; } } - this.updateLastResult(result); + if (saveAsLastResult) { + this.updateLastResult(result); + } return result; } @@ -216,15 +223,7 @@ export class ObservableQuery< * the previous values of those variables will be used. */ public refetch(variables?: Partial): Promise> { - let { fetchPolicy } = this.options; - // early return if trying to read from cache during refetch - if (fetchPolicy === 'cache-only') { - return Promise.reject(new InvariantError( - 'cache-only fetchPolicy option should not be used together with query refetch.', - )); - } - - const reobserveOptions: Partial> = { + const reobserveOptions: Partial> = { // Always disable polling for refetches. 
pollInterval: 0, }; @@ -232,11 +231,12 @@ export class ObservableQuery< // Unless the provided fetchPolicy always consults the network // (no-cache, network-only, or cache-and-network), override it with // network-only to force the refetch for this fetchQuery call. + const { fetchPolicy } = this.options; if (fetchPolicy !== 'no-cache' && fetchPolicy !== 'cache-and-network') { reobserveOptions.fetchPolicy = 'network-only'; // Go back to the original options.fetchPolicy after this refetch. - reobserveOptions.nextFetchPolicy = fetchPolicy; + reobserveOptions.nextFetchPolicy = fetchPolicy || "cache-first"; } if (variables && !equal(this.options.variables, variables)) { @@ -254,7 +254,7 @@ export class ObservableQuery< } public fetchMore( - fetchMoreOptions: FetchMoreQueryOptions & + fetchMoreOptions: FetchMoreQueryOptions & FetchMoreOptions, ): Promise> { const combinedOptions = { @@ -276,36 +276,11 @@ export class ObservableQuery< const qid = this.queryManager.generateQueryId(); + // Simulate a loading result for the original query with + // result.networkStatus === NetworkStatus.fetchMore. if (combinedOptions.notifyOnNetworkStatusChange) { - const currentResult = this.getCurrentResult(); - - // If we neglect to update queryInfo.networkStatus here, - // getCurrentResult may return a loading:false result while - // fetchMore is in progress, since getCurrentResult also consults - // queryInfo.networkStatus. Note: setting queryInfo.networkStatus - // to an in-flight status means that QueryInfo#shouldNotify will - // return false while fetchMore is in progress, which is why we - // call this.reobserve() explicitly in the .finally callback after - // fetchMore (below), since the cache write will not automatically - // trigger a notification, even though it does trigger a cache - // broadcast. This is a good thing, because it means we won't see - // intervening query notifications while fetchMore is pending. 
this.queryInfo.networkStatus = NetworkStatus.fetchMore; - - // Simulate a loading result for the original query with - // networkStatus === NetworkStatus.fetchMore. - this.observer.next!({ - // Note that currentResult is an ApolloCurrentQueryResult, - // whereas this.observer.next expects an ApolloQueryResult. - // Fortunately, ApolloCurrentQueryResult is a subtype of - // ApolloQueryResult (with additional .error and .partial fields), - // so TypeScript has no problem with this sleight of hand. - // TODO Consolidate these two types into a single type (most - // likely just ApolloQueryResult) after AC3 is released. - ...currentResult, - loading: true, - networkStatus: NetworkStatus.fetchMore, - }); + this.observe(); } return this.queryManager.fetchQuery( @@ -409,7 +384,7 @@ once, rather than every time you call fetchMore.`); } public setOptions( - newOptions: Partial>, + newOptions: Partial>, ): Promise> { return this.reobserve(newOptions); } @@ -459,22 +434,28 @@ once, rather than every time you call fetchMore.`); } let { fetchPolicy = 'cache-first' } = this.options; + const reobserveOptions: Partial> = { + fetchPolicy, + variables, + }; + if (fetchPolicy !== 'cache-first' && fetchPolicy !== 'no-cache' && fetchPolicy !== 'network-only') { - fetchPolicy = 'cache-and-network'; + reobserveOptions.fetchPolicy = 'cache-and-network'; + reobserveOptions.nextFetchPolicy = fetchPolicy; } - return this.reobserve({ - fetchPolicy, - variables, - }, NetworkStatus.setVariables); + return this.reobserve( + reobserveOptions, + NetworkStatus.setVariables, + ); } public updateQuery( mapFn: ( previousQueryResult: TData, - options: Pick, "variables">, + options: Pick, "variables">, ) => TData, ): void { const { queryManager } = this; @@ -599,15 +580,25 @@ once, rather than every time you call fetchMore.`); } public reobserve( - newOptions?: Partial>, + newOptions?: Partial>, newNetworkStatus?: NetworkStatus, ): Promise> { this.isTornDown = false; return 
this.getReobserver().reobserve(newOptions, newNetworkStatus); } - private observer: Observer> = { - next: result => { + // Pass the current result to this.observer.next without applying any + // fetch policies, bypassing the Reobserver. + private observe() { + // Passing false is important so that this.getCurrentResult doesn't + // save the fetchMore result as this.lastResult, causing it to be + // ignored due to the this.isDifferentFromLastResult check in + // this.observer.next. + this.observer.next(this.getCurrentResult(false)); + } + + private observer = { + next: (result: ApolloQueryResult) => { if (this.lastError || this.isDifferentFromLastResult(result)) { this.updateLastResult(result); iterateObserversSafely(this.observers, 'next', result); diff --git a/src/core/QueryInfo.ts b/src/core/QueryInfo.ts index 333a22fbce6..e3c82e511c7 100644 --- a/src/core/QueryInfo.ts +++ b/src/core/QueryInfo.ts @@ -2,7 +2,7 @@ import { DocumentNode, GraphQLError } from 'graphql'; import { equal } from "@wry/equality"; import { Cache, ApolloCache } from '../cache'; -import { WatchQueryOptions } from './watchQueryOptions'; +import { WatchQueryOptions, ErrorPolicy } from './watchQueryOptions'; import { ObservableQuery } from './ObservableQuery'; import { QueryListener } from './types'; import { FetchResult } from '../link/core'; @@ -10,6 +10,7 @@ import { ObservableSubscription, isNonEmptyArray, graphQLResultHasError, + canUseWeakMap, } from '../utilities'; import { NetworkStatus, @@ -24,6 +25,30 @@ export type QueryStoreValue = Pick; +const destructiveMethodCounts = new ( + canUseWeakMap ? 
WeakMap : Map +), number>(); + +function wrapDestructiveCacheMethod( + cache: ApolloCache, + methodName: keyof ApolloCache, +) { + const original = cache[methodName]; + if (typeof original === "function") { + cache[methodName] = function () { + destructiveMethodCounts.set( + cache, + // The %1e15 allows the count to wrap around to 0 safely every + // quadrillion evictions, so there's no risk of overflow. To be + // clear, this is more of a pedantic principle than something + // that matters in any conceivable practical scenario. + (destructiveMethodCounts.get(cache)! + 1) % 1e15, + ); + return original.apply(this, arguments); + }; + } +} + // A QueryInfo object represents a single query managed by the // QueryManager, which tracks all QueryInfo objects by queryId in its // this.queries Map. QueryInfo objects store the latest results and errors @@ -46,7 +71,18 @@ export class QueryInfo { networkError?: Error | null; graphQLErrors?: ReadonlyArray; - constructor(private cache: ApolloCache) {} + constructor(private cache: ApolloCache) { + // Track how often cache.evict is called, since we want eviction to + // override the feud-stopping logic in the markResult method, by + // causing shouldWrite to return true. Wrapping the cache.evict method + // is a bit of a hack, but it saves us from having to make eviction + // counting an official part of the ApolloCache API. 
+ if (!destructiveMethodCounts.has(cache)) { + destructiveMethodCounts.set(cache, 0); + wrapDestructiveCacheMethod(cache, "evict"); + wrapDestructiveCacheMethod(cache, "modify"); + } + } public init(query: { document: DocumentNode; @@ -112,7 +148,8 @@ export class QueryInfo { setDiff(diff: Cache.DiffResult | null) { const oldDiff = this.diff; this.diff = diff; - if (!this.dirty && diff?.result !== oldDiff?.result) { + if (!this.dirty && + (diff && diff.result) !== (oldDiff && oldDiff.result)) { this.dirty = true; if (!this.notifyTimeout) { this.notifyTimeout = setTimeout(() => this.notify(), 0); @@ -134,7 +171,18 @@ export class QueryInfo { if (oq) { oq["queryInfo"] = this; - this.listeners.add(this.oqListener = () => oq.reobserve()); + this.listeners.add(this.oqListener = () => { + // If this.diff came from an optimistic transaction, deliver the + // current cache data to the ObservableQuery, but don't perform a + // full reobservation, since oq.reobserve might make a network + // request, and we don't want to trigger network requests for + // optimistic updates. + if (this.getDiff().fromOptimisticTransaction) { + oq["observe"](); + } else { + oq.reobserve(); + } + }); } else { delete this.oqListener; } @@ -204,8 +252,27 @@ export class QueryInfo { } } - private lastWrittenResult?: FetchResult; - private lastWrittenVars?: WatchQueryOptions["variables"]; + private lastWrite?: { + result: FetchResult; + variables: WatchQueryOptions["variables"]; + dmCount: number | undefined; + }; + + private shouldWrite( + result: FetchResult, + variables: WatchQueryOptions["variables"], + ) { + const { lastWrite } = this; + return !( + lastWrite && + // If cache.evict has been called since the last time we wrote this + // data into the cache, there's a chance writing this result into + // the cache will repair what was evicted. 
+ lastWrite.dmCount === destructiveMethodCounts.get(this.cache) && + equal(variables, lastWrite.variables) && + equal(result.data, lastWrite.result.data) + ); + } public markResult( result: FetchResult, @@ -221,23 +288,25 @@ export class QueryInfo { this.diff = { result: result.data, complete: true }; } else if (allowCacheWrite) { - const ignoreErrors = - options.errorPolicy === 'ignore' || - options.errorPolicy === 'all'; - let writeWithErrors = !graphQLResultHasError(result); - if (!writeWithErrors && ignoreErrors && result.data) { - writeWithErrors = true; - } - - if (writeWithErrors) { + if (shouldWriteResult(result, options.errorPolicy)) { // Using a transaction here so we have a chance to read the result // back from the cache before the watch callback fires as a result // of writeQuery, so we can store the new diff quietly and ignore // it when we receive it redundantly from the watch callback. this.cache.performTransaction(cache => { - if (this.lastWrittenResult && - equal(result.data, this.lastWrittenResult.data) && - equal(options.variables, this.lastWrittenVars)) { + if (this.shouldWrite(result, options.variables)) { + cache.writeQuery({ + query: this.document!, + data: result.data as T, + variables: options.variables, + }); + + this.lastWrite = { + result, + variables: options.variables, + dmCount: destructiveMethodCounts.get(this.cache), + }; + } else { // If result is the same as the last result we received from // the network (and the variables match too), avoid writing // result into the cache again. The wisdom of skipping this @@ -278,14 +347,6 @@ export class QueryInfo { } // If the previous this.diff was incomplete, fall through to // re-reading the latest data with cache.diff, below. 
- } else { - cache.writeQuery({ - query: this.document!, - data: result.data as T, - variables: options.variables, - }); - this.lastWrittenResult = result; - this.lastWrittenVars = options.variables; } const diff = cache.diff({ @@ -311,7 +372,7 @@ export class QueryInfo { }); } else { - this.lastWrittenResult = this.lastWrittenVars = void 0; + this.lastWrite = void 0; } } } @@ -323,7 +384,7 @@ export class QueryInfo { public markError(error: ApolloError) { this.networkStatus = NetworkStatus.error; - this.lastWrittenResult = this.lastWrittenVars = void 0; + this.lastWrite = void 0; if (error.graphQLErrors) { this.graphQLErrors = error.graphQLErrors; @@ -336,3 +397,17 @@ export class QueryInfo { return error; } } + +export function shouldWriteResult( + result: FetchResult, + errorPolicy: ErrorPolicy = "none", +) { + const ignoreErrors = + errorPolicy === "ignore" || + errorPolicy === "all"; + let writeWithErrors = !graphQLResultHasError(result); + if (!writeWithErrors && ignoreErrors && result.data) { + writeWithErrors = true; + } + return writeWithErrors; +} diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts index 6966570b9d1..42fde4aefcb 100644 --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -39,7 +39,7 @@ import { } from './types'; import { LocalState } from './LocalState'; -import { QueryInfo, QueryStoreValue } from './QueryInfo'; +import { QueryInfo, QueryStoreValue, shouldWriteResult } from './QueryInfo'; const { hasOwnProperty } = Object.prototype; @@ -192,6 +192,7 @@ export class QueryManager { result: { data: optimistic }, document: mutation, variables: variables, + errorPolicy, queryUpdatersById: generateUpdateQueriesInfo(), update: updateWithProxyFn, }, cache); @@ -235,6 +236,7 @@ export class QueryManager { result, document: mutation, variables, + errorPolicy, queryUpdatersById: generateUpdateQueriesInfo(), update: updateWithProxyFn, }, self.cache); @@ -333,7 +335,7 @@ export class QueryManager { public fetchQuery( queryId: 
string, - options: WatchQueryOptions, + options: WatchQueryOptions, networkStatus?: NetworkStatus, ): Promise> { return this.fetchQueryObservable( @@ -428,7 +430,7 @@ export class QueryManager { } public watchQuery( - options: WatchQueryOptions, + options: WatchQueryOptions, ): ObservableQuery { // assign variable default values if supplied options = { @@ -462,7 +464,7 @@ export class QueryManager { } public query( - options: QueryOptions, + options: QueryOptions, ): Promise> { invariant( options.query, @@ -588,6 +590,7 @@ export class QueryManager { public startGraphQLSubscription({ query, fetchPolicy, + errorPolicy, variables, context = {}, }: SubscriptionOptions): Observable> { @@ -601,10 +604,10 @@ export class QueryManager { variables, false, ).map(result => { - if (!fetchPolicy || fetchPolicy !== 'no-cache') { + if (fetchPolicy !== 'no-cache') { // the subscription interface should handle not sending us results we no longer subscribe to. // XXX I don't think we ever send in an object with errors, but we might in the future... - if (!graphQLResultHasError(result)) { + if (shouldWriteResult(result, errorPolicy)) { this.cache.write({ query, result: result.data, @@ -759,7 +762,7 @@ export class QueryManager { private getResultsFromLink( queryInfo: QueryInfo, allowCacheWrite: boolean, - options: Pick, + options: Pick, | "variables" | "context" | "fetchPolicy" @@ -817,7 +820,7 @@ export class QueryManager { public fetchQueryObservable( queryId: string, - options: WatchQueryOptions, + options: WatchQueryOptions, // The initial networkStatus for this fetch, most often // NetworkStatus.loading, but also possibly fetchMore, poll, refetch, // or setVariables. 
@@ -909,7 +912,13 @@ export class QueryManager { concast.cleanup(() => { this.fetchCancelFns.delete(queryId); - if (options.nextFetchPolicy) { + const { nextFetchPolicy } = options; + if (nextFetchPolicy) { + // The options.nextFetchPolicy transition should happen only once, + // but it should be possible for a nextFetchPolicy function to set + // this.nextFetchPolicy to perform an additional transition. + options.nextFetchPolicy = void 0; + // When someone chooses cache-and-network or network-only as their // initial FetchPolicy, they often do not want future cache updates to // trigger unconditional network requests, which is what repeatedly @@ -920,9 +929,9 @@ // The options.nextFetchPolicy option provides an easy way to update // options.fetchPolicy after the initial network request, without // having to call observableQuery.setOptions. - options.fetchPolicy = options.nextFetchPolicy; - // The options.nextFetchPolicy transition should happen only once. - options.nextFetchPolicy = void 0; + options.fetchPolicy = typeof nextFetchPolicy === "function" + ? nextFetchPolicy.call(options, options.fetchPolicy || "cache-first") + : nextFetchPolicy; } }); @@ -931,7 +940,7 @@ private fetchQueryByPolicy( queryInfo: QueryInfo, - options: WatchQueryOptions, + options: WatchQueryOptions, // The initial networkStatus for this fetch, most often // NetworkStatus.loading, but also possibly fetchMore, poll, refetch, // or setVariables. 
@@ -1072,6 +1081,7 @@ function markMutationResult( result: FetchResult; document: DocumentNode; variables: any; + errorPolicy: ErrorPolicy; queryUpdatersById: Record; update: ((cache: ApolloCache, mutationResult: Object) => void) | @@ -1080,7 +1090,7 @@ function markMutationResult( cache: ApolloCache, ) { // Incorporate the result from this mutation into the store - if (!graphQLResultHasError(mutation.result)) { + if (shouldWriteResult(mutation.result, mutation.errorPolicy)) { const cacheWrites: Cache.WriteOptions[] = [{ result: mutation.result.data, dataId: 'ROOT_MUTATION', diff --git a/src/core/Reobserver.ts b/src/core/Reobserver.ts index e658985005a..e5cc8ed28bc 100644 --- a/src/core/Reobserver.ts +++ b/src/core/Reobserver.ts @@ -17,13 +17,13 @@ import { invariant } from 'ts-invariant'; export class Reobserver { constructor( private observer: Observer>, - private options: WatchQueryOptions, + private options: WatchQueryOptions, // Almost certainly just a wrapper function around // QueryManager#fetchQueryObservable, but this small dose of // indirection means the Reobserver doesn't have to know/assume // anything about the QueryManager class. 
private fetch: ( - options: WatchQueryOptions, + options: WatchQueryOptions, newNetworkStatus?: NetworkStatus, ) => Concast>, // If we're polling, there may be times when we should avoid fetching, @@ -37,7 +37,7 @@ export class Reobserver { private concast?: Concast>; public reobserve( - newOptions?: Partial>, + newOptions?: Partial>, newNetworkStatus?: NetworkStatus, ): Promise> { if (newOptions) { @@ -66,7 +66,7 @@ export class Reobserver { return (this.concast = concast).promise; } - public updateOptions(newOptions: Partial>) { + public updateOptions(newOptions: Partial>) { Object.assign(this.options, compact(newOptions)); this.updatePolling(); return this; @@ -133,6 +133,7 @@ export class Reobserver { if (this.shouldFetch && this.shouldFetch()) { this.reobserve({ fetchPolicy: "network-only", + nextFetchPolicy: this.options.fetchPolicy || "cache-first", }, NetworkStatus.poll).then(poll, poll); } else { poll(); diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts index d2ec4b1d356..c220cb36895 100644 --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -1361,7 +1361,6 @@ describe('ObservableQuery', () => { data: dataOne, loading: false, networkStatus: 7, - partial: false, }); resolve(); }); @@ -1406,7 +1405,6 @@ describe('ObservableQuery', () => { data: dataOne, loading: false, networkStatus: NetworkStatus.ready, - partial: false, }); }).then(resolve, reject); }); @@ -1502,7 +1500,7 @@ describe('ObservableQuery', () => { const queryManager = mockQueryManager(reject, { request: { query, variables }, result: { data: dataOne, errors: [error] }, - }, + }, // FIXME: We shouldn't need a second mock, there should only be one network request { request: { query, variables }, @@ -1651,7 +1649,6 @@ describe('ObservableQuery', () => { data: void 0, loading: true, networkStatus: 1, - partial: false, }); subscribeAndCount(reject, observable, (handleCount, subResult) => { @@ -1659,7 +1656,6 @@ 
describe('ObservableQuery', () => { expect(subResult).toEqual({ loading: true, networkStatus: NetworkStatus.loading, - partial: false, }); } else if (handleCount === 2) { expect(subResult).toEqual({ @@ -1696,7 +1692,6 @@ describe('ObservableQuery', () => { data: undefined, loading: true, networkStatus: 1, - partial: false, }); subscribeAndCount(reject, observable, (handleCount, subResult) => { @@ -1711,7 +1706,6 @@ describe('ObservableQuery', () => { data, loading, networkStatus, - partial: false, }); } else if (handleCount === 2) { expect(stripSymbols(subResult)).toEqual({ diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts index 02ef884f56e..a9adc0ac168 100644 --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -2288,6 +2288,112 @@ describe('QueryManager', () => { }); }); + itAsync("should disable feud-stopping logic after evict or modify", (resolve, reject) => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + info: { + merge: false, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + cache, + link: new ApolloLink(operation => new Observable((observer: Observer) => { + observer.next!({ data: { info: { c: "see" }}}); + observer.complete!(); + })), + }); + + const query = gql`query { info { c } }`; + + const obs = client.watchQuery({ + query, + returnPartialData: true, + }); + + subscribeAndCount(reject, obs, (count, result) => { + if (count === 1) { + expect(result).toEqual({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + } else if (count === 2) { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); + + cache.evict({ + fieldName: "info", + }); + + } else if (count === 3) { + expect(result).toEqual({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + } else if 
(count === 4) { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); + + cache.modify({ + fields: { + info(_, { DELETE }) { + return DELETE; + }, + }, + }); + + } else if (count === 5) { + expect(result).toEqual({ + loading: true, + networkStatus: NetworkStatus.loading, + data: {}, + partial: true, + }); + + } else if (count === 6) { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + info: { + c: "see", + }, + }, + }); + + setTimeout(resolve, 100); + + } else { + reject(new Error(`Unexpected ${JSON.stringify({count,result})}`)); + } + }); + }); + itAsync('should not error when replacing unidentified data with a normalized ID', (resolve, reject) => { const queryWithoutId = gql` query { diff --git a/src/core/__tests__/fetchPolicies.ts b/src/core/__tests__/fetchPolicies.ts index abf78b225e1..5f1f0731982 100644 --- a/src/core/__tests__/fetchPolicies.ts +++ b/src/core/__tests__/fetchPolicies.ts @@ -3,6 +3,7 @@ import gql from 'graphql-tag'; import { ApolloClient, NetworkStatus } from '../../core'; import { ApolloLink } from '../../link/core'; import { InMemoryCache } from '../../cache'; +import { Observable } from '../../utilities'; import { stripSymbols, subscribeAndCount, @@ -439,6 +440,62 @@ describe('cache-first', () => { }); }); +describe('cache-only', () => { + itAsync('allows explicit refetch to happen', (resolve, reject) => { + let counter = 0; + const client = new ApolloClient({ + cache: new InMemoryCache, + link: new ApolloLink(operation => new Observable(observer => { + observer.next({ + data: { + count: ++counter, + }, + }); + observer.complete(); + })), + }); + + const query = gql`query { counter }`; + + const observable = client.watchQuery({ + query, + nextFetchPolicy: 'cache-only', + }); + + subscribeAndCount(reject, observable, (count, result) => { + if (count === 1) { + expect(result).toEqual({ + loading: false, + networkStatus: 
NetworkStatus.ready, + data: { + count: 1, + }, + }); + + expect(observable.options.fetchPolicy).toBe('cache-only'); + + observable.refetch().catch(reject); + + } else if (count === 2) { + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + count: 2, + }, + }); + + expect(observable.options.fetchPolicy).toBe('cache-only'); + + setTimeout(resolve, 50); + + } else { + reject(`too many results (${count})`); + } + }); + }); +}); + describe('cache-and-network', function() { itAsync('gives appropriate networkStatus for refetched queries', (resolve, reject) => { const client = new ApolloClient({ diff --git a/src/core/types.ts b/src/core/types.ts index a58cb921670..b139672fc52 100644 --- a/src/core/types.ts +++ b/src/core/types.ts @@ -6,6 +6,8 @@ import { QueryInfo } from './QueryInfo'; import { NetworkStatus } from './networkStatus'; import { Resolver } from './LocalState'; +export { TypedDocumentNode } from '@graphql-typed-document-node/core'; + export type QueryListener = (queryInfo: QueryInfo) => void; export type OperationVariables = Record; diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts index d865367214f..e3b2ebe6602 100644 --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -1,4 +1,5 @@ import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { ApolloCache } from '../cache'; import { FetchResult } from '../link/core'; @@ -34,14 +35,14 @@ export type ErrorPolicy = 'none' | 'ignore' | 'all'; /** * Common options shared across all query interfaces. */ -export interface QueryBaseOptions { +export interface QueryBaseOptions { /** * A GraphQL document that consists of a single query to be sent down to the * server. */ // TODO REFACTOR: rename this to document. Didn't do it yet because it's in a // lot of tests. 
- query: DocumentNode; + query: DocumentNode | TypedDocumentNode; /** * A map going from variable name to variable value, where the variables are used @@ -63,8 +64,8 @@ export interface QueryBaseOptions { /** * Query options. */ -export interface QueryOptions - extends QueryBaseOptions { +export interface QueryOptions + extends QueryBaseOptions { /** * Specifies the {@link FetchPolicy} to be used for this query */ @@ -74,8 +75,8 @@ export interface QueryOptions /** * We can change these options to an ObservableQuery */ -export interface ModifiableWatchQueryOptions - extends QueryBaseOptions { +export interface ModifiableWatchQueryOptions + extends QueryBaseOptions { /** * The time interval (in milliseconds) on which this query should be * refetched from the server. @@ -104,9 +105,9 @@ export interface ModifiableWatchQueryOptions /** * Watched query options. */ -export interface WatchQueryOptions - extends QueryBaseOptions, - ModifiableWatchQueryOptions { +export interface WatchQueryOptions + extends QueryBaseOptions, + ModifiableWatchQueryOptions { /** * Specifies the {@link FetchPolicy} to be used for this query. */ @@ -114,11 +115,14 @@ export interface WatchQueryOptions /** * Specifies the {@link FetchPolicy} to be used after this query has completed. 
*/ - nextFetchPolicy?: WatchQueryFetchPolicy; + nextFetchPolicy?: WatchQueryFetchPolicy | (( + this: WatchQueryOptions, + lastFetchPolicy: WatchQueryFetchPolicy, + ) => WatchQueryFetchPolicy); } -export interface FetchMoreQueryOptions { - query?: DocumentNode; +export interface FetchMoreQueryOptions { + query?: DocumentNode | TypedDocumentNode; variables?: Pick; context?: any; } @@ -140,19 +144,19 @@ export type SubscribeToMoreOptions< TSubscriptionVariables = OperationVariables, TSubscriptionData = TData > = { - document: DocumentNode; + document: DocumentNode | TypedDocumentNode; variables?: TSubscriptionVariables; updateQuery?: UpdateQueryFn; onError?: (error: Error) => void; context?: Record; }; -export interface SubscriptionOptions { +export interface SubscriptionOptions { /** * A GraphQL document, often created with `gql` from the `graphql-tag` * package, that contains a single subscription inside of it. */ - query: DocumentNode; + query: DocumentNode | TypedDocumentNode; /** * An object that maps from the name of a variable as used in the subscription @@ -165,6 +169,11 @@ export interface SubscriptionOptions { */ fetchPolicy?: FetchPolicy; + /** + * Specifies the {@link ErrorPolicy} to be used for this operation + */ + errorPolicy?: ErrorPolicy; + /** * Context object to be passed through the link execution chain. */ @@ -256,7 +265,7 @@ export interface MutationOptions< * A GraphQL document, often created with `gql` from the `graphql-tag` * package, that contains a single mutation inside of it. */ - mutation: DocumentNode; + mutation: DocumentNode | TypedDocumentNode; /** * The context to be passed to the link execution chain. 
This context will diff --git a/src/link/persisted-queries/__tests__/index.ts b/src/link/persisted-queries/__tests__/index.ts new file mode 100644 index 00000000000..1779aa3416e --- /dev/null +++ b/src/link/persisted-queries/__tests__/index.ts @@ -0,0 +1,407 @@ +import gql from 'graphql-tag'; +import { sha256 } from 'crypto-hash'; +import { print } from 'graphql'; +import { times } from 'lodash'; +import fetch from 'jest-fetch-mock'; + +import { ApolloLink, execute } from '../../core'; +import { Observable } from '../../../utilities'; +import { createHttpLink } from '../../http/createHttpLink'; + +import { createPersistedQueryLink as createPersistedQuery, VERSION } from '../'; + +global.fetch = fetch; + +const makeAliasFields = (fieldName: string, numAliases: number) => + times(numAliases, idx => `${fieldName}${idx}: ${fieldName}`).reduce( + (aliasBody, currentAlias) => `${aliasBody}\n ${currentAlias}`, + ); + +const query = gql` + query Test($id: ID!) { + foo(id: $id) { + bar + ${makeAliasFields('title', 1000)} + } + } +`; + +const variables = { id: 1 }; +const queryString = print(query); +const data = { + foo: { bar: true }, +}; +const response = JSON.stringify({ data }); +const errors = [{ message: 'PersistedQueryNotFound' }]; +const giveUpErrors = [{ message: 'PersistedQueryNotSupported' }]; +const multipleErrors = [...errors, { message: 'not logged in' }]; +const errorResponse = JSON.stringify({ errors }); +const giveUpResponse = JSON.stringify({ errors: giveUpErrors }); +const multiResponse = JSON.stringify({ errors: multipleErrors }); + +let hash: string; +(async () => { + hash = await sha256(queryString); +})(); + +describe('happy path', () => { + beforeEach(fetch.mockReset); + + it('sends a sha256 hash of the query under extensions', done => { + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const 
[uri, request] = fetch.mock.calls[0]; + expect(uri).toEqual('/graphql'); + expect(request!.body!).toBe( + JSON.stringify({ + operationName: 'Test', + variables, + extensions: { + persistedQuery: { + version: VERSION, + sha256Hash: hash, + }, + }, + }), + ); + done(); + }, done.fail); + }); + + it('sends a version along with the request', done => { + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [uri, request] = fetch.mock.calls[0]; + expect(uri).toEqual('/graphql'); + const parsed = JSON.parse(request!.body!.toString()); + expect(parsed.extensions.persistedQuery.version).toBe(VERSION); + done(); + }, done.fail); + }); + + it('memoizes between requests', done => { + fetch.mockResponseOnce(response); + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + + let start = new Date(); + execute(link, { query, variables }).subscribe(result => { + const firstRun = new Date().valueOf() - start.valueOf(); + expect(result.data).toEqual(data); + // this one should go faster because of memoization + let secondStart = new Date(); + execute(link, { query, variables }).subscribe(result2 => { + const secondRun = new Date().valueOf() - secondStart.valueOf(); + expect(firstRun).toBeGreaterThan(secondRun); + expect(result2.data).toEqual(data); + done(); + }, done.fail); + }, done.fail); + }); + + it('supports loading the hash from other method', done => { + fetch.mockResponseOnce(response); + const generateHash = + (query: any) => Promise.resolve('foo'); + const link = createPersistedQuery({ generateHash }).concat( + createHttpLink(), + ); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [uri, request] = fetch.mock.calls[0]; + expect(uri).toEqual('/graphql'); + const parsed = 
JSON.parse(request!.body!.toString()); + expect(parsed.extensions.persistedQuery.sha256Hash).toBe('foo'); + done(); + }, done.fail); + }); + + it('errors if unable to convert to sha256', done => { + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + + execute(link, { query: '1234', variables } as any).subscribe(done.fail as any, error => { + expect(error.message).toMatch(/Invalid AST Node/); + done(); + }); + }); + + it('unsubscribes correctly', done => { + const delay = new ApolloLink(() => { + return new Observable(ob => { + setTimeout(() => { + ob.next({ data }); + ob.complete(); + }, 100); + }); + }); + const link = createPersistedQuery({ sha256 }).concat(delay); + + const sub = execute(link, { query, variables }).subscribe( + done.fail as any, + done.fail, + done.fail, + ); + + setTimeout(() => { + sub.unsubscribe(); + done(); + }, 10); + }); + + it('should error if `sha256` and `generateHash` options are both missing', () => { + const createPersistedQueryFn = createPersistedQuery as any; + try { + createPersistedQueryFn(); + fail('should have thrown an error'); + } catch (error) { + expect( + error.message.indexOf( + 'Missing/invalid "sha256" or "generateHash" function' + ) + ).toBe(0); + } + }); + + it('should error if `sha256` or `generateHash` options are not functions', () => { + const createPersistedQueryFn = createPersistedQuery as any; + [ + { sha256: 'ooops' }, + { generateHash: 'ooops' } + ].forEach(options => { + try { + createPersistedQueryFn(options); + fail('should have thrown an error'); + } catch (error) { + expect( + error.message.indexOf( + 'Missing/invalid "sha256" or "generateHash" function' + ) + ).toBe(0); + } + }); + }); + + it('should work with a synchronous SHA-256 function', done => { + const crypto = require('crypto'); + const sha256Hash = crypto.createHmac('sha256', queryString).digest('hex'); + + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ + 
sha256(data) { + return crypto.createHmac('sha256', data).digest('hex'); + } + }).concat(createHttpLink()); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [uri, request] = fetch.mock.calls[0]; + expect(uri).toEqual('/graphql'); + expect(request!.body!).toBe( + JSON.stringify({ + operationName: 'Test', + variables, + extensions: { + persistedQuery: { + version: VERSION, + sha256Hash: sha256Hash, + }, + }, + }), + ); + done(); + }, done.fail); + }); +}); + +describe('failure path', () => { + beforeEach(fetch.mockReset); + + it('correctly identifies the error shape from the server', done => { + fetch.mockResponseOnce(errorResponse); + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, failure] = fetch.mock.calls[0]; + expect(JSON.parse(failure!.body!.toString()).query).not.toBeDefined(); + const [, success] = fetch.mock.calls[1]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect( + JSON.parse(success!.body!.toString()).extensions.persistedQuery.sha256Hash, + ).toBe(hash); + done(); + }, done.fail); + }); + + it('sends GET for the first response only with useGETForHashedQueries', done => { + fetch.mockResponseOnce(errorResponse); + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256, useGETForHashedQueries: true }).concat( + createHttpLink(), + ); + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, failure] = fetch.mock.calls[0]; + expect(failure!.method).toBe('GET'); + expect(failure!.body).not.toBeDefined(); + const [, success] = fetch.mock.calls[1]; + expect(success!.method).toBe('POST'); + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect( + 
JSON.parse(success!.body!.toString()).extensions.persistedQuery.sha256Hash, + ).toBe(hash); + done(); + }, done.fail); + }); + + it('does not try again after receiving NotSupported error', done => { + fetch.mockResponseOnce(giveUpResponse); + fetch.mockResponseOnce(response); + + // mock it again so we can verify it doesn't try anymore + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, failure] = fetch.mock.calls[0]; + expect(JSON.parse(failure!.body!.toString()).query).not.toBeDefined(); + const [, success] = fetch.mock.calls[1]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + execute(link, { query, variables }).subscribe(secondResult => { + expect(secondResult.data).toEqual(data); + + const [, success] = fetch.mock.calls[2]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + done(); + }, done.fail); + }, done.fail); + }); + + it('works with multiple errors', done => { + fetch.mockResponseOnce(multiResponse); + fetch.mockResponseOnce(response); + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, failure] = fetch.mock.calls[0]; + expect(JSON.parse(failure!.body!.toString()).query).not.toBeDefined(); + const [, success] = fetch.mock.calls[1]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect( + JSON.parse(success!.body!.toString()).extensions.persistedQuery.sha256Hash, + ).toBe(hash); + done(); + }, done.fail); + }); + + it('handles a 500 network error and still retries', done => { + let failed = false; + fetch.mockResponseOnce(response); + + 
// mock it again so we can verify it doesn't try anymore + fetch.mockResponseOnce(response); + + const fetcher = (...args: any[]) => { + if (!failed) { + failed = true; + return Promise.resolve({ + json: () => Promise.resolve('This will blow up'), + text: () => Promise.resolve('THIS WILL BLOW UP'), + status: 500, + }); + } + + return fetch(...args); + }; + const link = createPersistedQuery({ sha256 }).concat( + createHttpLink({ fetch: fetcher } as any), + ); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, success] = fetch.mock.calls[0]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + execute(link, { query, variables }).subscribe(secondResult => { + expect(secondResult.data).toEqual(data); + + const [, success] = fetch.mock.calls[1]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + done(); + }, done.fail); + }, done.fail); + }); + + it('handles a 400 network error and still retries', done => { + let failed = false; + fetch.mockResponseOnce(response); + + // mock it again so we can verify it doesn't try anymore + fetch.mockResponseOnce(response); + + const fetcher = (...args: any[]) => { + if (!failed) { + failed = true; + return Promise.resolve({ + json: () => Promise.resolve('This will blow up'), + text: () => Promise.resolve('THIS WILL BLOW UP'), + status: 400, + }); + } + + return fetch(...args); + }; + const link = createPersistedQuery({ sha256 }).concat( + createHttpLink({ fetch: fetcher } as any), + ); + + execute(link, { query, variables }).subscribe(result => { + expect(result.data).toEqual(data); + const [, success] = fetch.mock.calls[0]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + 
execute(link, { query, variables }).subscribe(secondResult => { + expect(secondResult.data).toEqual(data); + + const [, success] = fetch.mock.calls[1]; + expect(JSON.parse(success!.body!.toString()).query).toBe(queryString); + expect(JSON.parse(success!.body!.toString()).extensions).toBeUndefined(); + done(); + }, done.fail); + }, done.fail); + }); + + it('only retries a 400 network error once', done => { + let fetchCalls = 0; + const fetcher = () => { + fetchCalls++; + return Promise.resolve({ + json: () => Promise.resolve('This will blow up'), + text: () => Promise.resolve('THIS WILL BLOW UP'), + status: 400, + }); + }; + const link = createPersistedQuery({ sha256 }).concat( + createHttpLink({ fetch: fetcher } as any), + ); + + execute(link, { query, variables }).subscribe( + result => done.fail, + error => { + expect(fetchCalls).toBe(2); + done(); + }, + ); + }); +}); diff --git a/src/link/persisted-queries/__tests__/react.tsx b/src/link/persisted-queries/__tests__/react.tsx new file mode 100644 index 00000000000..5c458cd59e1 --- /dev/null +++ b/src/link/persisted-queries/__tests__/react.tsx @@ -0,0 +1,133 @@ +import React from 'react'; +import ReactDOM from 'react-dom/server'; +import gql from 'graphql-tag'; +import { print } from 'graphql'; +import { sha256 } from 'crypto-hash'; +import fetch from 'jest-fetch-mock'; + +import { ApolloProvider } from '../../../react/context'; +import { InMemoryCache as Cache } from '../../../cache/inmemory/inMemoryCache'; +import { ApolloClient } from '../../../core/ApolloClient'; +import { createHttpLink } from '../../http/createHttpLink'; +import { graphql } from '../../../react/hoc/graphql'; +import { getDataFromTree } from '../../../react/ssr/getDataFromTree'; +import { createPersistedQueryLink as createPersistedQuery, VERSION } from '../'; + +global.fetch = fetch; + +const query = gql` + query Test($filter: FilterObject) { + foo(filter: $filter) { + bar + } + } +`; + +const variables = { + filter: { + $filter: 'smash', + 
}, +}; +const variables2 = { + filter: null, +}; +const data = { + foo: { bar: true }, +}; +const data2 = { + foo: { bar: false }, +}; +const response = JSON.stringify({ data }); +const response2 = JSON.stringify({ data: data2 }); +const queryString = print(query); + +let hash: string; +(async () => { + hash = await sha256(queryString); +})(); + +describe('react application', () => { + beforeEach(fetch.mockReset); + it('works on a simple tree', async () => { + fetch.mockResponseOnce(response); + fetch.mockResponseOnce(response2); + + const link = createPersistedQuery({ sha256 }).concat(createHttpLink()); + + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + ssrMode: true, + }); + + const Query = graphql(query)(({ data, children }) => { + if (data!.loading) return null; + + return ( +
+ {(data as any).foo.bar && 'data was returned!'} + {children} +
+ ); + }); + const app = ( + + +

Hello!

+
+
+ ); + + // preload all the data for client side request (with filter) + const result = await getDataFromTree(app); + expect(result).toContain('data was returned'); + let [, request] = fetch.mock.calls[0]; + expect(request!.body).toBe( + JSON.stringify({ + operationName: 'Test', + variables, + extensions: { + persistedQuery: { + version: VERSION, + sha256Hash: hash, + }, + }, + }), + ); + + // reset client and try with different input object + const client2 = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + ssrMode: true, + }); + + const app2 = ( + + +

Hello!

+
+
+ ); + + // change filter object to different variables and SSR + await getDataFromTree(app2); + const markup2 = ReactDOM.renderToString(app2); + + let [, request2] = fetch.mock.calls[1]; + + expect(markup2).not.toContain('data was returned'); + expect(request2!.body).toBe( + JSON.stringify({ + operationName: 'Test', + variables: variables2, + extensions: { + persistedQuery: { + version: VERSION, + sha256Hash: hash, + }, + }, + }), + ); + }); +}); diff --git a/src/link/persisted-queries/index.ts b/src/link/persisted-queries/index.ts new file mode 100644 index 00000000000..362fa1e9935 --- /dev/null +++ b/src/link/persisted-queries/index.ts @@ -0,0 +1,257 @@ +import { print } from 'graphql/language/printer'; +import { + DocumentNode, + ExecutionResult, + GraphQLError, +} from 'graphql'; +import { invariant } from 'ts-invariant'; + +import { ApolloLink, Operation } from '../core'; +import { Observable, Observer, compact } from '../../utilities'; + +export const VERSION = 1; + +export interface ErrorResponse { + graphQLErrors?: readonly GraphQLError[]; + networkError?: Error; + response?: ExecutionResult; + operation: Operation; +} + +type SHA256Function = (...args: any[]) => string | PromiseLike; +type GenerateHashFunction = (document: DocumentNode) => string | PromiseLike; + +namespace PersistedQueryLink { + interface BaseOptions { + disable?: (error: ErrorResponse) => boolean; + useGETForHashedQueries?: boolean; + }; + + interface SHA256Options extends BaseOptions { + sha256: SHA256Function; + generateHash?: never; + }; + + interface GenerateHashOptions extends BaseOptions { + sha256?: never; + generateHash: GenerateHashFunction; + }; + + export type Options = SHA256Options | GenerateHashOptions; +} + +const defaultOptions = { + disable: ({ graphQLErrors, operation }: ErrorResponse) => { + // if the server doesn't support persisted queries, don't try anymore + if ( + graphQLErrors && + graphQLErrors.some( + ({ message }) => message === 'PersistedQueryNotSupported', 
+ ) + ) { + return true; + } + + const { response } = operation.getContext(); + // if the server responds with bad request + // apollo-server responds with 400 for GET and 500 for POST when no query is found + if ( + response && + response.status && + (response.status === 400 || response.status === 500) + ) { + return true; + } + + return false; + }, + useGETForHashedQueries: false, +}; + +function operationDefinesMutation(operation: Operation) { + return operation.query.definitions.some( + d => d.kind === 'OperationDefinition' && d.operation === 'mutation'); +} + +const { hasOwnProperty } = Object.prototype; + +const hashesByQuery = new WeakMap< + DocumentNode, + Record> +>(); + +let nextHashesChildKey = 0; + +export const createPersistedQueryLink = ( + options: PersistedQueryLink.Options, +) => { + // Ensure a SHA-256 hash function is provided, if a custom hash + // generation function is not provided. We don't supply a SHA-256 hash + // function by default, to avoid forcing one as a dependency. Developers + // should pick the most appropriate SHA-256 function (sync or async) for + // their needs/environment, or provide a fully custom hash generation + // function (via the `generateHash` option) if they want to handle + // hashing with something other than SHA-256. + invariant( + options && ( + typeof options.sha256 === 'function' || + typeof options.generateHash === 'function' + ), + 'Missing/invalid "sha256" or "generateHash" function. Please ' + + 'configure one using the "createPersistedQueryLink(options)" options ' + + 'parameter.' + ); + + const { + sha256, + // If both a `sha256` and `generateHash` option are provided, the + // `sha256` option will be ignored. Developers can configure and + // use any hashing approach they want in a custom `generateHash` + // function; they aren't limited to SHA-256. 
+ generateHash = (query: DocumentNode) => + Promise.resolve(sha256!(print(query))), + disable, + useGETForHashedQueries + } = compact(defaultOptions, options); + + let supportsPersistedQueries = true; + + const hashesChildKey = 'forLink' + nextHashesChildKey++; + + const getHashPromise = (query: DocumentNode) => + new Promise<string>(resolve => resolve(generateHash(query))); + + function getQueryHash(query: DocumentNode): Promise<string> { + if (!query || typeof query !== 'object') { + // If the query is not an object, we won't be able to store its hash as + // a property of query[hashesKey], so we let generateHash(query) decide + // what to do with the bogus query. + return getHashPromise(query); + } + let hashes = hashesByQuery.get(query)!; + if (!hashes) hashesByQuery.set(query, hashes = Object.create(null)); + return hasOwnProperty.call(hashes, hashesChildKey) + ? hashes[hashesChildKey] + : hashes[hashesChildKey] = getHashPromise(query); + } + + return new ApolloLink((operation, forward) => { + invariant( + forward, + 'PersistedQueryLink cannot be the last link in the chain.' + ); + + const { query } = operation; + + return new Observable((observer: Observer<ExecutionResult>) => { + let subscription: ZenObservable.Subscription; + let retried = false; + let originalFetchOptions: any; + let setFetchOptions = false; + const retry = ( + { + response, + networkError, + }: { response?: ExecutionResult; networkError?: Error }, + cb: () => void, + ) => { + if (!retried && ((response && response.errors) || networkError)) { + retried = true; + + const disablePayload = { + response, + networkError, + operation, + graphQLErrors: response ? 
response.errors : undefined, + }; + + // if the server doesn't support persisted queries, don't try anymore + supportsPersistedQueries = !disable(disablePayload); + + // if its not found, we can try it again, otherwise just report the error + if ( + (response && + response.errors && + response.errors.some( + ({ message }: { message: string }) => + message === 'PersistedQueryNotFound', + )) || + !supportsPersistedQueries + ) { + // need to recall the link chain + if (subscription) subscription.unsubscribe(); + // actually send the query this time + operation.setContext({ + http: { + includeQuery: true, + includeExtensions: supportsPersistedQueries, + }, + }); + if (setFetchOptions) { + operation.setContext({ fetchOptions: originalFetchOptions }); + } + subscription = forward(operation).subscribe(handler); + + return; + } + } + cb(); + }; + const handler = { + next: (response: ExecutionResult) => { + retry({ response }, () => observer.next!(response)); + }, + error: (networkError: Error) => { + retry({ networkError }, () => observer.error!(networkError)); + }, + complete: observer.complete!.bind(observer), + }; + + // don't send the query the first time + operation.setContext({ + http: { + includeQuery: !supportsPersistedQueries, + includeExtensions: supportsPersistedQueries, + }, + }); + + // If requested, set method to GET if there are no mutations. Remember the + // original fetchOptions so we can restore them if we fall back to a + // non-hashed request. 
+ if ( + useGETForHashedQueries && + supportsPersistedQueries && + !operationDefinesMutation(operation) + ) { + operation.setContext( + ({ fetchOptions = {} }: { fetchOptions: Record }) => { + originalFetchOptions = fetchOptions; + return { + fetchOptions: { + ...fetchOptions, + method: 'GET', + }, + }; + }, + ); + setFetchOptions = true; + } + + if (supportsPersistedQueries) { + getQueryHash(query).then((sha256Hash) => { + operation.extensions.persistedQuery = { + version: VERSION, + sha256Hash, + }; + subscription = forward(operation).subscribe(handler); + }).catch(observer.error!.bind(observer));; + } else { + subscription = forward(operation).subscribe(handler); + } + + return () => { + if (subscription) subscription.unsubscribe(); + }; + }); + }); +}; diff --git a/src/react/components/Mutation.tsx b/src/react/components/Mutation.tsx index 6a725d8ddd4..bd2ed3bb83d 100644 --- a/src/react/components/Mutation.tsx +++ b/src/react/components/Mutation.tsx @@ -11,22 +11,24 @@ export function Mutation( return props.children ? 
props.children(runMutation, result) : null; } -export namespace Mutation { - export const propTypes = { - mutation: PropTypes.object.isRequired, - variables: PropTypes.object, - optimisticResponse: PropTypes.oneOfType([PropTypes.object, PropTypes.func]), - refetchQueries: PropTypes.oneOfType([ - PropTypes.arrayOf( - PropTypes.oneOfType([PropTypes.string, PropTypes.object]) - ), - PropTypes.func - ]), - awaitRefetchQueries: PropTypes.bool, - update: PropTypes.func, - children: PropTypes.func.isRequired, - onCompleted: PropTypes.func, - onError: PropTypes.func, - fetchPolicy: PropTypes.string - }; +export interface Mutation { + propTypes: PropTypes.InferProps>; } + +Mutation.propTypes = { + mutation: PropTypes.object.isRequired, + variables: PropTypes.object, + optimisticResponse: PropTypes.oneOfType([PropTypes.object, PropTypes.func]), + refetchQueries: PropTypes.oneOfType([ + PropTypes.arrayOf( + PropTypes.oneOfType([PropTypes.string, PropTypes.object]) + ), + PropTypes.func + ]), + awaitRefetchQueries: PropTypes.bool, + update: PropTypes.func, + children: PropTypes.func.isRequired, + onCompleted: PropTypes.func, + onError: PropTypes.func, + fetchPolicy: PropTypes.string +}; diff --git a/src/react/components/Query.tsx b/src/react/components/Query.tsx index 0f180411ff3..f875e3d7d32 100644 --- a/src/react/components/Query.tsx +++ b/src/react/components/Query.tsx @@ -12,19 +12,21 @@ export function Query( return children && result ? 
children(result) : null; } -export namespace Query { - export const propTypes = { - client: PropTypes.object, - children: PropTypes.func.isRequired, - fetchPolicy: PropTypes.string, - notifyOnNetworkStatusChange: PropTypes.bool, - onCompleted: PropTypes.func, - onError: PropTypes.func, - pollInterval: PropTypes.number, - query: PropTypes.object.isRequired, - variables: PropTypes.object, - ssr: PropTypes.bool, - partialRefetch: PropTypes.bool, - returnPartialData: PropTypes.bool - }; +export interface Query { + propTypes: PropTypes.InferProps>; } + +Query.propTypes = { + client: PropTypes.object, + children: PropTypes.func.isRequired, + fetchPolicy: PropTypes.string, + notifyOnNetworkStatusChange: PropTypes.bool, + onCompleted: PropTypes.func, + onError: PropTypes.func, + pollInterval: PropTypes.number, + query: PropTypes.object.isRequired, + variables: PropTypes.object, + ssr: PropTypes.bool, + partialRefetch: PropTypes.bool, + returnPartialData: PropTypes.bool +}; diff --git a/src/react/components/Subscription.tsx b/src/react/components/Subscription.tsx index 1d3786087e4..c7962dddbd3 100644 --- a/src/react/components/Subscription.tsx +++ b/src/react/components/Subscription.tsx @@ -11,13 +11,15 @@ export function Subscription( return props.children && result ? 
props.children(result) : null; } -export namespace Subscription { - export const propTypes = { - subscription: PropTypes.object.isRequired, - variables: PropTypes.object, - children: PropTypes.func, - onSubscriptionData: PropTypes.func, - onSubscriptionComplete: PropTypes.func, - shouldResubscribe: PropTypes.oneOfType([PropTypes.func, PropTypes.bool]) - }; +export interface Subscription { + propTypes: PropTypes.InferProps>; } + +Subscription.propTypes = { + subscription: PropTypes.object.isRequired, + variables: PropTypes.object, + children: PropTypes.func, + onSubscriptionData: PropTypes.func, + onSubscriptionComplete: PropTypes.func, + shouldResubscribe: PropTypes.oneOfType([PropTypes.func, PropTypes.bool]) +}; diff --git a/src/react/components/types.ts b/src/react/components/types.ts index 7229777f6d7..83dea55043f 100644 --- a/src/react/components/types.ts +++ b/src/react/components/types.ts @@ -1,4 +1,5 @@ import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { OperationVariables } from '../../core'; import { @@ -16,14 +17,14 @@ export interface QueryComponentOptions< TVariables = OperationVariables > extends QueryFunctionOptions { children: (result: QueryResult) => JSX.Element | null; - query: DocumentNode; + query: DocumentNode | TypedDocumentNode; } export interface MutationComponentOptions< TData = any, TVariables = OperationVariables > extends BaseMutationOptions { - mutation: DocumentNode; + mutation: DocumentNode | TypedDocumentNode; children: ( mutateFunction: MutationFunction, result: MutationResult @@ -34,6 +35,6 @@ export interface SubscriptionComponentOptions< TData = any, TVariables = OperationVariables > extends BaseSubscriptionOptions { - subscription: DocumentNode; + subscription: DocumentNode | TypedDocumentNode; children?: null | ((result: SubscriptionResult) => JSX.Element | null); } diff --git a/src/react/data/QueryData.ts b/src/react/data/QueryData.ts index 
90a24363f1c..4de10ff7c1b 100644 --- a/src/react/data/QueryData.ts +++ b/src/react/data/QueryData.ts @@ -227,14 +227,14 @@ export class QueryData extends OperationData { } private updateObservableQuery() { - if (this.getOptions().skip) return; - // If we skipped initially, we may not have yet created the observable if (!this.currentObservable) { this.initializeObservableQuery(); return; } + if (this.getOptions().skip) return; + const newObservableQueryOptions = { ...this.prepareObservableQueryOptions(), children: null @@ -457,7 +457,7 @@ export class QueryData extends OperationData { this.currentObservable!.refetch(variables); private obsFetchMore = ( - fetchMoreOptions: FetchMoreQueryOptions & + fetchMoreOptions: FetchMoreQueryOptions & FetchMoreOptions ) => this.currentObservable!.fetchMore(fetchMoreOptions); diff --git a/src/react/hoc/types.ts b/src/react/hoc/types.ts index 909878aa6f3..4c0ec8c3ec8 100644 --- a/src/react/hoc/types.ts +++ b/src/react/hoc/types.ts @@ -24,7 +24,7 @@ export interface QueryControls< loading: boolean; variables: TGraphQLVariables; fetchMore: ( - fetchMoreOptions: FetchMoreQueryOptions & + fetchMoreOptions: FetchMoreQueryOptions & FetchMoreOptions ) => Promise>; refetch: (variables?: TGraphQLVariables) => Promise>; diff --git a/src/react/hooks/__tests__/useReactiveVar.test.tsx b/src/react/hooks/__tests__/useReactiveVar.test.tsx new file mode 100644 index 00000000000..2cef7bfe836 --- /dev/null +++ b/src/react/hooks/__tests__/useReactiveVar.test.tsx @@ -0,0 +1,183 @@ +import React from "react"; +import { render, wait, act } from "@testing-library/react"; + +import { itAsync } from "../../../testing"; +import { makeVar } from "../../../core"; +import { useReactiveVar } from "../useReactiveVar"; + +describe("useReactiveVar Hook", () => { + itAsync("works with one component", (resolve, reject) => { + const counterVar = makeVar(0); + let renderCount = 0; + + function Component() { + const count = useReactiveVar(counterVar); + + switch 
(++renderCount) { + case 1: + expect(count).toBe(0); + act(() => { + counterVar(count + 1); + }); + break; + case 2: + expect(count).toBe(1); + act(() => { + counterVar(counterVar() + 2); + }); + break; + case 3: + expect(count).toBe(3); + break; + default: + reject(`too many (${renderCount}) renders`); + } + + return null; + } + + render(); + + return wait(() => { + expect(renderCount).toBe(3); + expect(counterVar()).toBe(3); + }).then(resolve, reject); + }); + + itAsync("works when two components share a variable", async (resolve, reject) => { + const counterVar = makeVar(0); + + let parentRenderCount = 0; + function Parent() { + const count = useReactiveVar(counterVar); + + switch (++parentRenderCount) { + case 1: + expect(count).toBe(0); + break; + case 2: + expect(count).toBe(1); + break; + case 3: + expect(count).toBe(11); + break; + default: + reject(`too many (${parentRenderCount}) parent renders`); + } + + return ; + } + + let childRenderCount = 0; + function Child() { + const count = useReactiveVar(counterVar); + + switch (++childRenderCount) { + case 1: + expect(count).toBe(0); + break; + case 2: + expect(count).toBe(1); + break; + case 3: + expect(count).toBe(11); + break; + default: + reject(`too many (${childRenderCount}) child renders`); + } + + return null; + } + + render(); + + await wait(() => { + expect(parentRenderCount).toBe(1); + expect(childRenderCount).toBe(1); + }); + + expect(counterVar()).toBe(0); + act(() => { + counterVar(1); + }); + + await wait(() => { + expect(parentRenderCount).toBe(2); + expect(childRenderCount).toBe(2); + }); + + expect(counterVar()).toBe(1); + act(() => { + counterVar(counterVar() + 10); + }); + + await wait(() => { + expect(parentRenderCount).toBe(3); + expect(childRenderCount).toBe(3); + }); + + expect(counterVar()).toBe(11); + + resolve(); + }); + + itAsync("does not update if component has been unmounted", (resolve, reject) => { + const counterVar = makeVar(0); + let renderCount = 0; + let 
attemptedUpdateAfterUnmount = false; + + function Component() { + const count = useReactiveVar(counterVar); + + switch (++renderCount) { + case 1: + expect(count).toBe(0); + act(() => { + counterVar(count + 1); + }); + break; + case 2: + expect(count).toBe(1); + act(() => { + counterVar(counterVar() + 2); + }); + break; + case 3: + expect(count).toBe(3); + setTimeout(() => { + unmount(); + setTimeout(() => { + counterVar(counterVar() * 2); + attemptedUpdateAfterUnmount = true; + }, 10); + }, 10); + break; + default: + reject(`too many (${renderCount}) renders`); + } + + return null; + } + + // To detect updates of unmounted components, we have to monkey-patch + // the console.error method. + const consoleErrorArgs: any[][] = []; + const { error } = console; + console.error = function (...args: any[]) { + consoleErrorArgs.push(args); + return error.apply(this, args); + }; + + const { unmount } = render(); + + return wait(() => { + expect(attemptedUpdateAfterUnmount).toBe(true); + }).then(() => { + expect(renderCount).toBe(3); + expect(counterVar()).toBe(6); + expect(consoleErrorArgs).toEqual([]); + }).finally(() => { + console.error = error; + }).then(resolve, reject); + }); +}); diff --git a/src/react/hooks/index.ts b/src/react/hooks/index.ts index b301bfed59d..a9a323f7fc1 100644 --- a/src/react/hooks/index.ts +++ b/src/react/hooks/index.ts @@ -3,3 +3,4 @@ export * from './useLazyQuery'; export * from './useMutation'; export * from './useQuery'; export * from './useSubscription'; +export * from './useReactiveVar'; diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts index 987622915e9..8032639ad98 100644 --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -1,11 +1,12 @@ import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { LazyQueryHookOptions, QueryTuple } from '../types/types'; import { useBaseQuery } from './utils/useBaseQuery'; import { 
OperationVariables } from '../../core'; export function useLazyQuery( - query: DocumentNode, + query: DocumentNode | TypedDocumentNode, options?: LazyQueryHookOptions ) { return useBaseQuery(query, options, true) as QueryTuple< diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts index fbc2202e4b8..28f96618c0f 100644 --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -1,5 +1,6 @@ import { useContext, useState, useRef, useEffect } from 'react'; import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { MutationHookOptions, MutationTuple } from '../types/types'; import { MutationData } from '../data'; @@ -7,7 +8,7 @@ import { OperationVariables } from '../../core'; import { getApolloContext } from '../context'; export function useMutation( - mutation: DocumentNode, + mutation: DocumentNode | TypedDocumentNode, options?: MutationHookOptions ): MutationTuple { const context = useContext(getApolloContext()); diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts index ae226cebc91..bf75ad0e323 100644 --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -1,11 +1,12 @@ import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { QueryHookOptions, QueryResult } from '../types/types'; import { useBaseQuery } from './utils/useBaseQuery'; import { OperationVariables } from '../../core'; export function useQuery( - query: DocumentNode, + query: DocumentNode | TypedDocumentNode, options?: QueryHookOptions ) { return useBaseQuery(query, options, false) as QueryResult< diff --git a/src/react/hooks/useReactiveVar.ts b/src/react/hooks/useReactiveVar.ts new file mode 100644 index 00000000000..4bb277413be --- /dev/null +++ b/src/react/hooks/useReactiveVar.ts @@ -0,0 +1,15 @@ +import { useState, useEffect } from 'react'; +import { ReactiveVar } from '../../core'; + +export function 
useReactiveVar<T>(rv: ReactiveVar<T>): T { + const value = rv(); + // We don't actually care what useState thinks the value of the variable + // is, so we take only the update function from the returned array. + const mute = rv.onNextChange(useState(value)[1]); + // Once the component is unmounted, ignore future updates. Note that the + // useEffect function returns the mute function without calling it, + // allowing it to be called when the component unmounts. This is + // equivalent to useEffect(() => () => mute(), []), but shorter. + useEffect(() => mute, []); + return value; +} diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts index aa4b6f1e372..d553b9fbef3 100644 --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -1,5 +1,6 @@ import { useContext, useState, useRef, useEffect } from 'react'; import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { SubscriptionHookOptions } from '../types/types'; import { SubscriptionData } from '../data'; @@ -7,7 +8,7 @@ import { OperationVariables } from '../../core'; import { getApolloContext } from '../context'; export function useSubscription<TData = any, TVariables = OperationVariables>( - subscription: DocumentNode, + subscription: DocumentNode | TypedDocumentNode<TData, TVariables>, options?: SubscriptionHookOptions<TData, TVariables> ) { const context = useContext(getApolloContext()); diff --git a/src/react/hooks/utils/useBaseQuery.ts b/src/react/hooks/utils/useBaseQuery.ts index ac293453770..2d5083097e0 100644 --- a/src/react/hooks/utils/useBaseQuery.ts +++ b/src/react/hooks/utils/useBaseQuery.ts @@ -1,5 +1,6 @@ import { useContext, useEffect, useReducer, useRef } from 'react'; import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { QueryHookOptions, @@ -13,7 +14,7 @@ import { OperationVariables } from '../../../core'; import { getApolloContext } from '../../context'; export function useBaseQuery<TData = any, TVariables = OperationVariables>( - query: 
DocumentNode, + query: DocumentNode | TypedDocumentNode, options?: QueryHookOptions, lazy = false ) { diff --git a/src/react/types/types.ts b/src/react/types/types.ts index cfba43611df..cbd52ad284f 100644 --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -1,5 +1,6 @@ import { ReactNode } from 'react'; import { DocumentNode } from 'graphql'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; import { Observable } from '../../utilities'; import { FetchResult } from '../../link/core'; @@ -63,13 +64,14 @@ export type ObservableQueryFields = Pick< | 'variables' > & { fetchMore: (( - fetchMoreOptions: FetchMoreQueryOptions & + fetchMoreOptions: FetchMoreQueryOptions & FetchMoreOptions ) => Promise>) & (( - fetchMoreOptions: { query?: DocumentNode } & FetchMoreQueryOptions< + fetchMoreOptions: { query?: DocumentNode | TypedDocumentNode } & FetchMoreQueryOptions< TVariables2, - K + K, + TData > & FetchMoreOptions ) => Promise>); @@ -88,24 +90,24 @@ export interface QueryResult export interface QueryDataOptions extends QueryFunctionOptions { children?: (result: QueryResult) => ReactNode; - query: DocumentNode; + query: DocumentNode | TypedDocumentNode; } export interface QueryHookOptions extends QueryFunctionOptions { - query?: DocumentNode; + query?: DocumentNode | TypedDocumentNode; } export interface LazyQueryHookOptions< TData = any, TVariables = OperationVariables > extends Omit, 'skip'> { - query?: DocumentNode; + query?: DocumentNode | TypedDocumentNode; } export interface QueryPreviousData { client?: ApolloClient; - query?: DocumentNode; + query?: DocumentNode | TypedDocumentNode; observableQueryOptions?: {}; result?: QueryResult | null; loading?: boolean; @@ -203,12 +205,12 @@ export interface MutationHookOptions< TData = any, TVariables = OperationVariables > extends BaseMutationOptions { - mutation?: DocumentNode; + mutation?: DocumentNode | TypedDocumentNode; } export interface MutationDataOptions extends BaseMutationOptions { 
- mutation: DocumentNode; + mutation: DocumentNode | TypedDocumentNode; } export type MutationTuple = [ @@ -250,14 +252,14 @@ export interface SubscriptionHookOptions< TData = any, TVariables = OperationVariables > extends BaseSubscriptionOptions { - subscription?: DocumentNode; + subscription?: DocumentNode | TypedDocumentNode; } export interface SubscriptionDataOptions< TData = any, TVariables = OperationVariables > extends BaseSubscriptionOptions { - subscription: DocumentNode; + subscription: DocumentNode | TypedDocumentNode; children?: null | ((result: SubscriptionResult) => JSX.Element | null); }