diff --git a/src/cache/inmemory/fragmentRegistry.ts b/src/cache/inmemory/fragmentRegistry.ts
index 12cade01aea..47e434bee75 100644
--- a/src/cache/inmemory/fragmentRegistry.ts
+++ b/src/cache/inmemory/fragmentRegistry.ts
@@ -9,7 +9,8 @@ import { visit } from "graphql";
 import { wrap } from "optimism";
 
 import type { FragmentMap } from "../../utilities/index.js";
-import { getFragmentDefinitions } from "../../utilities/index.js";
+import { cacheSizes, getFragmentDefinitions } from "../../utilities/index.js";
+import { WeakCache } from "@wry/caches";
 
 export interface FragmentRegistryAPI {
   register(...fragments: DocumentNode[]): this;
@@ -68,11 +69,23 @@ class FragmentRegistry implements FragmentRegistryAPI {
     const proto = FragmentRegistry.prototype;
     this.invalidate = (this.lookup = wrap(proto.lookup.bind(this), {
       makeCacheKey: (arg) => arg,
+      max: cacheSizes.fragmentRegistryLookup,
     })).dirty; // This dirty function is bound to the wrapped lookup method.
-    this.transform = wrap(proto.transform.bind(this));
-    this.findFragmentSpreads = wrap(proto.findFragmentSpreads.bind(this));
+    this.transform = wrap(proto.transform.bind(this), {
+      cache: WeakCache,
+      max: cacheSizes.fragmentRegistryTransform,
+    });
+    this.findFragmentSpreads = wrap(proto.findFragmentSpreads.bind(this), {
+      cache: WeakCache,
+      max: cacheSizes.fragmentRegistryFindFragmentSpreads,
+    });
   }
 
+  /*
+   * Note:
+   * This method is only memoized so it can serve as a dependency to `transform`,
+   * so calling `invalidate` will invalidate cache entries for `transform`.
+   */
   public lookup(fragmentName: string): FragmentDefinitionNode | null {
     return this.registry[fragmentName] || null;
   }
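The `cacheSizes.fragmentRegistry*` values wired in above (and defined in `sizes.ts` further down) bound the memoized `lookup`, `transform`, and `findFragmentSpreads` results of a registry created with `createFragmentRegistry`. A minimal usage sketch, not part of the patch; it assumes the existing `createFragmentRegistry`, `InMemoryCache`, and `gql` exports from Apollo Client, which are not shown in this diff:

```ts
import { gql, InMemoryCache } from "@apollo/client";
import { createFragmentRegistry } from "@apollo/client/cache";

const fragments = createFragmentRegistry(gql`
  fragment ItemFields on Item {
    id
    text
  }
`);

// Queries can now spread ItemFields without shipping its definition; the
// registry's memoized transform()/lookup() results are bounded by
// cacheSizes.fragmentRegistryTransform and cacheSizes.fragmentRegistryLookup.
const cache = new InMemoryCache({ fragments });

// Re-registering a changed definition is meant to invalidate that fragment
// name; per the note added above, invalidating `lookup` entries also evicts
// memoized transform() results that depended on them.
fragments.register(gql`
  fragment ItemFields on Item {
    id
    text
    updatedAt
  }
`);
```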
diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts
index 84e01b89023..8af3412db60 100644
--- a/src/core/QueryManager.ts
+++ b/src/core/QueryManager.ts
@@ -4,7 +4,6 @@ import type { DocumentNode } from "graphql";
 // TODO(brian): A hack until this issue is resolved (https://github.com/graphql/graphql-js/issues/3356)
 type OperationTypeNode = any;
 import { equal } from "@wry/equality";
-import { WeakCache } from "@wry/caches";
 
 import type { ApolloLink, FetchResult } from "../link/core/index.js";
 import { execute } from "../link/core/index.js";
@@ -100,6 +99,7 @@ interface TransformCacheEntry {
 import type { DefaultOptions } from "./ApolloClient.js";
 import { Trie } from "@wry/trie";
+import { CleanWeakCache, cacheSizes } from "../utilities/index.js";
 
 export class QueryManager<TStore> {
   public cache: ApolloCache<TStore>;
@@ -662,10 +662,10 @@ export class QueryManager<TStore> {
     return this.documentTransform.transformDocument(document);
   }
 
-  private transformCache = new WeakCache<
+  private transformCache = new CleanWeakCache<
     DocumentNode,
     TransformCacheEntry
-  >(/** TODO: decide on a maximum size (will do all max sizes in a combined separate PR) */);
+  >(cacheSizes.queryManagerTransforms);
 
   public getDocumentInfo(document: DocumentNode) {
     const { transformCache } = this;
diff --git a/src/link/persisted-queries/index.ts b/src/link/persisted-queries/index.ts
index 95c4129c802..2514df5fc4b 100644
--- a/src/link/persisted-queries/index.ts
+++ b/src/link/persisted-queries/index.ts
@@ -12,7 +12,7 @@ import type {
 import { Observable, compact, isNonEmptyArray } from "../../utilities/index.js";
 import type { NetworkError } from "../../errors/index.js";
 import type { ServerError } from "../utils/index.js";
-import { WeakCache } from "@wry/caches";
+import { cacheSizes, CleanWeakCache } from "../../utilities/index.js";
 
 export const VERSION = 1;
 
@@ -94,7 +94,7 @@ function operationDefinesMutation(operation: Operation) {
 export const createPersistedQueryLink = (
   options: PersistedQueryLink.Options
 ) => {
-  let hashesByQuery: WeakCache<DocumentNode, Promise<string>> | undefined;
+  let hashesByQuery: CleanWeakCache<DocumentNode, Promise<string>> | undefined;
   function resetHashCache() {
     hashesByQuery = undefined;
   }
@@ -140,8 +140,7 @@ export const createPersistedQueryLink = (
       return getHashPromise(query);
     }
     if (!hashesByQuery) {
-      hashesByQuery =
-        new WeakCache(/** TODO: decide on a maximum size (will do all max sizes in a combined separate PR) */);
+      hashesByQuery = new CleanWeakCache(cacheSizes.persistedQueryHashes);
     }
     let hash = hashesByQuery.get(query)!;
     if (!hash) hashesByQuery.set(query, (hash = getHashPromise(query)));
diff --git a/src/react/parser/index.ts b/src/react/parser/index.ts
index 3c5dc3abc8d..9aed9eeb05c 100644
--- a/src/react/parser/index.ts
+++ b/src/react/parser/index.ts
@@ -1,4 +1,3 @@
-import { WeakCache } from "@wry/caches";
 import { invariant } from "../../utilities/globals/index.js";
 
 import type {
@@ -7,6 +6,7 @@ import type {
   VariableDefinitionNode,
   OperationDefinitionNode,
 } from "graphql";
+import { CleanWeakCache, cacheSizes } from "../../utilities/index.js";
 
 export enum DocumentType {
   Query,
@@ -22,7 +22,7 @@ export interface IDocumentDefinition {
 
 let cache:
   | undefined
-  | WeakCache<
+  | CleanWeakCache<
       DocumentNode,
       {
        name: string;
@@ -50,8 +50,7 @@ export function operationName(type: DocumentType) {
 // This parser is mostly used to safety check incoming documents.
 export function parser(document: DocumentNode): IDocumentDefinition {
   if (!cache) {
-    cache =
-      new WeakCache(/** TODO: decide on a maximum size (will do all max sizes in a combined separate PR) */);
+    cache = new CleanWeakCache(cacheSizes.parser);
   }
   const cached = cache.get(document);
   if (cached) return cached;
diff --git a/src/utilities/caching/caches.ts b/src/utilities/caching/caches.ts
new file mode 100644
index 00000000000..808768fc959
--- /dev/null
+++ b/src/utilities/caching/caches.ts
@@ -0,0 +1,47 @@
+import type { CommonCache } from "@wry/caches";
+import { WeakCache, StrongCache } from "@wry/caches";
+
+const scheduledCleanup = new WeakSet<CommonCache<any, any>>();
+function schedule(cache: CommonCache<any, any>) {
+  if (!scheduledCleanup.has(cache)) {
+    scheduledCleanup.add(cache);
+    setTimeout(() => {
+      cache.clean();
+      scheduledCleanup.delete(cache);
+    }, 1000);
+  }
+}
+/**
+ * A version of WeakCache that will auto-schedule a cleanup of the cache when
+ * a new item is added.
+ * Throttled to once per second.
+ *
+ * @privateRemarks
+ * Should be used throughout the rest of the codebase instead of WeakCache,
+ * with the notable exception of usage in `wrap` from `optimism` - that one
+ * already handles cleanup and should remain a `WeakCache`.
+ */
+export class CleanWeakCache<K extends object, V> extends WeakCache<K, V> {
+  set(key: K, value: V) {
+    schedule(this);
+    return super.set(key, value);
+  }
+}
+/**
+ * A version of StrongCache that will auto-schedule a cleanup of the cache when
+ * a new item is added.
+ * Throttled to once per second.
+ *
+ * @privateRemarks
+ * Should be used throughout the rest of the codebase instead of StrongCache,
+ * with the notable exception of usage in `wrap` from `optimism` - that one
+ * already handles cleanup and should remain a `StrongCache`.
+ */
+export class CleanStrongCache<K, V> extends StrongCache<K, V> {
+  set(key: K, value: V) {
+    schedule(this);
+    return super.set(key, value);
+  }
+}
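A quick sketch of how these wrappers behave (illustration only, not part of the patch; it assumes `CleanWeakCache` and `cacheSizes` are re-exported from `@apollo/client/utilities`, as the final hunk of this diff sets up):

```ts
import { parse } from "graphql";
import type { DocumentNode } from "graphql";
import { CleanWeakCache, cacheSizes } from "@apollo/client/utilities";

// A cache bounded by one of the sizes configured in sizes.ts below.
const printed = new CleanWeakCache<DocumentNode, string>(cacheSizes.print);

const query: DocumentNode = parse("query { __typename }");

// Every set() registers the cache for a single throttled clean() call, so
// entries beyond cacheSizes.print are evicted at most once per second
// instead of on every write.
printed.set(query, "query { __typename }");
```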
diff --git a/src/utilities/caching/index.ts b/src/utilities/caching/index.ts
new file mode 100644
index 00000000000..4fc49c5dbab
--- /dev/null
+++ b/src/utilities/caching/index.ts
@@ -0,0 +1,2 @@
+export { CleanStrongCache, CleanWeakCache } from "./caches.js";
+export { cacheSizes } from "./sizes.js";
diff --git a/src/utilities/caching/sizes.ts b/src/utilities/caching/sizes.ts
new file mode 100644
index 00000000000..b1501ad5622
--- /dev/null
+++ b/src/utilities/caching/sizes.ts
@@ -0,0 +1,193 @@
+import { global } from "../globals/index.js";
+
+declare global {
+  interface Window {
+    [cacheSizeSymbol]?: Partial<CacheSizes>;
+  }
+}
+
+/**
+ * The cache sizes used by various Apollo Client caches.
+ *
+ * Note that these caches are all derivative, and if an item is cache-collected,
+ * it's not the end of the world - the cached item will just be recalculated.
+ *
+ * As a result, these cache sizes should not be chosen to hold every value ever
+ * encountered, but rather to hold a reasonable number of values that can be
+ * assumed to be on the screen at any given time.
+ *
+ * We assume a "base value" of 1000 here, which is already very generous.
+ * In most applications, it will be very unlikely that 1000 different queries
+ * are on screen at the same time.
+ */
+interface CacheSizes {
+  /**
+   * Cache size for the [`print`](../../utilities/graphql/print.ts) function.
+   *
+   * @defaultValue
+   * Defaults to `2000`.
+   *
+   * @remarks
+   * This method is called from the `QueryManager` and various `Link`s,
+   * always with the "serverQuery", i.e. the server-facing part of a transformed
+   * DocumentNode.
+   */
+  print: number;
+  /**
+   * Cache size for the [`parser`](../../react/parser/index.ts) function.
+   *
+   * @defaultValue
+   * Defaults to `1000`.
+   *
+   * @remarks
+   * This function is used directly in HOCs, and nowadays mainly accessed by
+   * calling `verifyDocumentType` from various hooks.
+   * It is called with a user-provided DocumentNode.
+   */
+  parser: number;
+  /**
+   * Cache size for the `performWork` method of each [`DocumentTransform`](../../utilities/graphql/DocumentTransform.ts).
+   *
+   * @defaultValue
+   * Defaults to `2000`.
+   *
+   * @remarks
+   * This method is called from `transformDocument`, which is called from
+   * `QueryManager` with a user-provided DocumentNode.
+   * It is also called with already-transformed DocumentNodes, assuming the
+   * user provided additional transforms.
+   *
+   * The cache size here should be chosen with other DocumentTransforms in mind.
+   * For example, if there were a DocumentTransform that took `n` DocumentNodes and
+   * returned a differently-transformed DocumentNode depending on whether the app is
+   * online or offline, then we assume that the cache holds `2*n` documents.
+   *
+   * No user-provided DocumentNode will actually be "the last one", as we run the
+   * `defaultDocumentTransform` before *and* after the user-provided transforms.
+   *
+   * So if we assume that the user-provided transforms receive `n` documents and
+   * return `n` documents, the cache size should be `2*n`.
+   *
+   * If we assume that the user-provided transforms receive `n` documents and
+   * return `2*n` documents, the cache size should be `3*n`.
+   *
+   * This size should also then be used in every other cache that mentions that
+   * it operates on a "transformed" DocumentNode.
+   */
+  documentTransform: number;
+  /**
+   * Cache size for the `transformCache` used in the `getDocumentInfo` method of
+   * [`QueryManager`](../../core/QueryManager.ts).
+   *
+   * @defaultValue
+   * Defaults to `2000`.
+   *
+   * @remarks
+   * `getDocumentInfo` is called throughout the `QueryManager` with transformed
+   * DocumentNodes.
+   */
+  queryManagerTransforms: number;
+  /**
+   * Cache size for the `hashesByQuery` cache in the [`PersistedQueryLink`](../../link/persisted-queries/index.ts).
+   *
+   * @defaultValue
+   * Defaults to `2000`.
+   *
+   * @remarks
+   * This cache stores the hashes of persisted queries. It works with
+   * transformed DocumentNodes.
+   */
+  persistedQueryHashes: number;
+  /**
+   * Cache size for the `sortingMap` used by [`canonicalStringify`](../../utilities/common/canonicalStringify.ts).
+   *
+   * @defaultValue
+   * Defaults to `1000`.
+   *
+   * @remarks
+   * This cache contains the sorted keys of objects that are stringified by
+   * `canonicalStringify`.
+   * It uses the stringified unsorted keys of objects as keys.
+   * The cache will not grow beyond the number of different object **shapes**
+   * encountered in an application, no matter how much actual data gets stringified.
+   */
+  canonicalStringify: number;
+  /**
+   * Cache size for the `transform` method of [`FragmentRegistry`](../../cache/inmemory/fragmentRegistry.ts).
+   *
+   * @defaultValue
+   * Defaults to `2000`.
+   *
+   * @remarks
+   * This function is called as part of the `defaultDocumentTransform`, which will be
+   * called with user-provided and already-transformed DocumentNodes.
+   */
+  fragmentRegistryTransform: number;
+  /**
+   * Cache size for the `lookup` method of [`FragmentRegistry`](../../cache/inmemory/fragmentRegistry.ts).
+   *
+   * @defaultValue
+   * Defaults to `1000`.
+   *
+   * @remarks
+   * This function is called with fragment names in the form of a string.
+   *
+   * Note:
+   * This function is a dependency of `transform`, so a cache size that is too small
+   * here might inadvertently invalidate values in the `transform` cache.
+   */
+  fragmentRegistryLookup: number;
+  /**
+   * Cache size for the `findFragmentSpreads` method of [`FragmentRegistry`](../../cache/inmemory/fragmentRegistry.ts).
+   *
+   * @defaultValue
+   * Defaults to `4000`.
+   *
+   * @remarks
+   * This function is called with transformed DocumentNodes, as well as recursively
+   * with every fragment spread referenced within those, or within a fragment
+   * referenced by a fragment spread.
+   *
+   * Note:
+   * This function is a dependency of `transform`, so a cache size that is too small
+   * here might inadvertently invalidate values in the `transform` cache.
+   */
+  fragmentRegistryFindFragmentSpreads: number;
+}
+
+const cacheSizeSymbol = Symbol.for("apollo.cacheSize");
+/**
+ * The global cache size configuration for Apollo Client.
+ *
+ * @remarks
+ * You can directly modify this object, but any modification will
+ * only have an effect on caches that are created after the modification.
+ *
+ * So for global caches, such as `parser`, `canonicalStringify` and `print`,
+ * you might need to call `.reset` on them, which will essentially re-create them.
+ *
+ * Alternatively, you can set `globalThis[Symbol.for("apollo.cacheSize")]` before
+ * you load the Apollo Client package:
+ *
+ * @example
+ * ```ts
+ * globalThis[Symbol.for("apollo.cacheSize")] = {
+ *   parser: 100
+ * }
+ * ```
+ */
+export const cacheSizes: CacheSizes = {
+  parser: 1000,
+  canonicalStringify: 1000,
+  print: 2000,
+  documentTransform: 2000,
+  queryManagerTransforms: 2000,
+  persistedQueryHashes: 2000,
+  fragmentRegistryTransform: 2000,
+  fragmentRegistryLookup: 1000,
+  fragmentRegistryFindFragmentSpreads: 4000,
+  ...global[cacheSizeSymbol],
+};
diff --git a/src/utilities/common/canonicalStringify.ts b/src/utilities/common/canonicalStringify.ts
index 021f23430e2..239ec8496aa 100644
--- a/src/utilities/common/canonicalStringify.ts
+++ b/src/utilities/common/canonicalStringify.ts
@@ -1,3 +1,5 @@
+import { CleanStrongCache } from "../../utilities/index.js";
+
 /**
  * Like JSON.stringify, but with object keys always sorted in the same order.
  *
@@ -24,14 +26,15 @@ export const canonicalStringify = Object.assign(
       // Clearing the sortingMap will reclaim all cached memory, without
       // affecting the logical results of canonicalStringify, but potentially
       // sacrificing performance until the cache is refilled.
-      sortingMap.clear();
+      sortingMap = new CleanStrongCache();
     },
   }
 );
 
 // Keys are JSON-serialized arrays of object keys (in any order), and values
 // are sorted arrays of the same keys.
-const sortingMap = new Map<string, readonly string[]>();
+let sortingMap!: CleanStrongCache<string, readonly string[]>;
+canonicalStringify.reset();
 
 // The JSON.stringify function takes an optional second argument called a
 // replacer function. This function is called for each key-value pair in the
diff --git a/src/utilities/graphql/DocumentTransform.ts b/src/utilities/graphql/DocumentTransform.ts
index 7a5ce40fe4c..c32df389ddc 100644
--- a/src/utilities/graphql/DocumentTransform.ts
+++ b/src/utilities/graphql/DocumentTransform.ts
@@ -5,6 +5,7 @@ import { invariant } from "../globals/index.js";
 import type { DocumentNode } from "graphql";
 import { WeakCache } from "@wry/caches";
 import { wrap } from "optimism";
+import { cacheSizes } from "../../utilities/index.js";
 
 export type DocumentTransformCacheKey = ReadonlyArray<unknown>;
 
@@ -96,7 +97,7 @@ export class DocumentTransform {
           return stableCacheKeys.lookupArray(cacheKeys);
         }
       },
-      max: 1000 /** TODO: decide on a maximum size (will do all max sizes in a combined separate PR) */,
+      max: cacheSizes.documentTransform,
       cache: WeakCache,
     }
   );
diff --git a/src/utilities/graphql/print.ts b/src/utilities/graphql/print.ts
index 3ba1134c968..f8474aa00ec 100644
--- a/src/utilities/graphql/print.ts
+++ b/src/utilities/graphql/print.ts
@@ -1,8 +1,8 @@
 import type { ASTNode } from "graphql";
 import { print as origPrint } from "graphql";
-import { WeakCache } from "@wry/caches";
+import { CleanWeakCache, cacheSizes } from "../../utilities/index.js";
 
-let printCache!: WeakCache<ASTNode, string>;
+let printCache!: CleanWeakCache<ASTNode, string>;
 export const print = Object.assign(
   (ast: ASTNode) => {
     let result = printCache.get(ast);
@@ -15,10 +15,7 @@ export const print = Object.assign(
   },
   {
     reset() {
-      printCache = new WeakCache<
-        ASTNode,
-        string
-      >(/** TODO: decide on a maximum size (will do all max sizes in a combined separate PR) */);
+      printCache = new CleanWeakCache(cacheSizes.print);
     },
   }
 );
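As a usage sketch of these knobs (illustration only, not part of the patch; it assumes `cacheSizes`, `print`, and `canonicalStringify` are all reachable from `@apollo/client/utilities`, as the final hunk below arranges for `cacheSizes`): resizing a global cache only takes effect once that cache is re-created, which is what the `reset` helpers shown above do.

```ts
import {
  cacheSizes,
  print,
  canonicalStringify,
} from "@apollo/client/utilities";

// Raise the print cache size for an unusually query-heavy application...
cacheSizes.print = 5000;

// ...then re-create the affected global caches so the new size is picked up.
print.reset();
canonicalStringify.reset();
```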
"./types/DeepOmit.js"; export type { DeepPartial } from "./types/DeepPartial.js"; export type { OnlyRequiredProperties } from "./types/OnlyRequiredProperties.js"; + +export { + CleanStrongCache, + CleanWeakCache, + cacheSizes, +} from "./caching/index.js";