From 131ee7aa202746563ebc199d7d124dbace31d645 Mon Sep 17 00:00:00 2001
From: Emiliano Sanchez
Date: Wed, 9 Oct 2024 12:13:20 -0300
Subject: [PATCH] revert data_loader_for_ssr

---
 CHANGES.txt                                |  2 -
 .../__tests__/sdkClientMethodCS.spec.ts    |  1 -
 src/sdkFactory/index.ts                    |  4 +-
 src/sdkFactory/types.ts                    |  1 -
 src/storages/__tests__/dataLoader.spec.ts  | 31 ------
 src/storages/dataLoader.ts                 | 94 +++++++------------
 src/storages/inMemory/InMemoryStorageCS.ts | 23 +----
 src/storages/types.ts                      |  2 +
 src/trackers/eventTracker.ts               |  2 +-
 src/trackers/impressionsTracker.ts         |  2 +-
 src/types.ts                               | 17 ++--
 11 files changed, 50 insertions(+), 129 deletions(-)
 delete mode 100644 src/storages/__tests__/dataLoader.spec.ts

diff --git a/CHANGES.txt b/CHANGES.txt
index 0ab19c90..4c333159 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,8 +1,6 @@
 2.0.0 (October XX, 2024)
  - Added support for targeting rules based on large segments.
  - Added `factory.destroy()` method, which invokes the `destroy` method on all SDK clients created by the factory.
- - Added `factory.getState()` method for standalone server-side SDKs, which returns the rollout plan snapshot from the storage.
- - Added `preloadedData` configuration option for standalone client-side SDKs, which allows preloading the SDK storage with a snapshot of the rollout plan.
  - Updated internal storage factory to emit the SDK_READY_FROM_CACHE event when it corresponds, to clean up the initialization flow.
  - Updated the handling of timers and async operations by moving them into an `init` factory method to enable lazy initialization of the SDK. This update is intended for the React SDK.
  - Bugfixing - Fixed an issue with the server-side polling manager that caused dangling timers when the SDK was destroyed before it was ready.

diff --git a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts
index 1ac052aa..793b12f8 100644
--- a/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts
+++ b/src/sdkClient/__tests__/sdkClientMethodCS.spec.ts
@@ -47,7 +47,6 @@ const params = {
   settings: settingsWithKey,
   telemetryTracker: telemetryTrackerFactory(),
   clients: {},
-  whenInit: (cb: () => void) => cb()
 };

 const invalidAttributes = [

diff --git a/src/sdkFactory/index.ts b/src/sdkFactory/index.ts
index 0951abd3..41706cc6 100644
--- a/src/sdkFactory/index.ts
+++ b/src/sdkFactory/index.ts
@@ -56,7 +56,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO.
       readiness.splits.emit(SDK_SPLITS_CACHE_LOADED);
     }
   });
-
+  // @TODO add support for dataloader: `if (params.dataLoader) params.dataLoader(storage);`
   const clients: Record = {};
   const telemetryTracker = telemetryTrackerFactory(storage.telemetry, platform.now);
   const integrationsManager = integrationsManagerFactory && integrationsManagerFactory({ settings, storage, telemetryTracker });
@@ -82,7 +82,7 @@ export function sdkFactory(params: ISdkFactoryParams): SplitIO.ICsSDK | SplitIO.
   // splitApi is used by SyncManager and Browser signal listener
   const splitApi = splitApiFactory && splitApiFactory(settings, platform, telemetryTracker);

-  const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform, whenInit };
+  const ctx: ISdkFactoryContext = { clients, splitApi, eventTracker, impressionsTracker, telemetryTracker, uniqueKeysTracker, sdkReadinessManager, readiness, settings, storage, platform };

   const syncManager = syncManagerFactory && syncManagerFactory(ctx as ISdkFactoryContextSync);
   ctx.syncManager = syncManager;

diff --git a/src/sdkFactory/types.ts b/src/sdkFactory/types.ts
index 774110c5..d52ce348 100644
--- a/src/sdkFactory/types.ts
+++ b/src/sdkFactory/types.ts
@@ -50,7 +50,6 @@ export interface ISdkFactoryContext {
   splitApi?: ISplitApi
   syncManager?: ISyncManager,
   clients: Record,
-  whenInit(cb: () => void): void
 }

 export interface ISdkFactoryContextSync extends ISdkFactoryContext {

diff --git a/src/storages/__tests__/dataLoader.spec.ts b/src/storages/__tests__/dataLoader.spec.ts
deleted file mode 100644
index c9f77849..00000000
--- a/src/storages/__tests__/dataLoader.spec.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-import { InMemoryStorageFactory } from '../inMemory/InMemoryStorage';
-import { InMemoryStorageCSFactory } from '../inMemory/InMemoryStorageCS';
-import { fullSettings } from '../../utils/settingsValidation/__tests__/settings.mocks';
-
-import * as dataLoader from '../dataLoader';
-
-test('loadData & getSnapshot', () => {
-  jest.spyOn(dataLoader, 'loadData');
-  const onReadyFromCacheCb = jest.fn();
-  // @ts-expect-error
-  const serverStorage = InMemoryStorageFactory({ settings: fullSettings });
-  serverStorage.splits.setChangeNumber(123); // @ts-expect-error
-  serverStorage.splits.addSplits([['split1', { name: 'split1' }]]);
-  serverStorage.segments.addToSegment('segment1', [fullSettings.core.key as string]);
-
-  const preloadedData = dataLoader.getSnapshot(serverStorage, [fullSettings.core.key as string]);
-
-  // @ts-expect-error
-  const clientStorage = InMemoryStorageCSFactory({ settings: { ...fullSettings, preloadedData }, onReadyFromCacheCb });
-
-  // Assert
-  expect(dataLoader.loadData).toBeCalledTimes(1);
-  expect(onReadyFromCacheCb).toBeCalledTimes(1);
-  expect(dataLoader.getSnapshot(clientStorage, [fullSettings.core.key as string])).toEqual(preloadedData);
-  expect(preloadedData).toEqual({
-    since: 123,
-    splitsData: [{ name: 'split1' }],
-    mySegmentsData: { [fullSettings.core.key as string]: ['segment1'] },
-    segmentsData: undefined
-  });
-});

diff --git a/src/storages/dataLoader.ts b/src/storages/dataLoader.ts
index 7b44df91..24898d68 100644
--- a/src/storages/dataLoader.ts
+++ b/src/storages/dataLoader.ts
@@ -1,85 +1,55 @@
 import { SplitIO } from '../types';
-import { ISegmentsCacheSync, ISplitsCacheSync, IStorageSync } from './types';
-import { setToArray, ISet } from '../utils/lang/sets';
-import { getMatching } from '../utils/key';
+import { DEFAULT_CACHE_EXPIRATION_IN_MILLIS } from '../utils/constants/browser';
+import { DataLoader, ISegmentsCacheSync, ISplitsCacheSync } from './types';

 /**
- * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function
- * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js)
+ * Factory of client-side storage loader
  *
- * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader and extended with a `mySegmentsData` property.
- * @param storage object containing `splits` and `segments` cache (client-side variant)
- * @param userKey user key (matching key) of the provided MySegmentsCache
- *
- * @TODO extend to load largeSegments
- * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag.
- * @TODO add logs, and input validation in this module, in favor of size reduction.
- * @TODO unit tests
+ * @param preloadedData validated data following the format proposed in https://github.com/godaddy/split-javascript-data-loader
+ * and extended with a `mySegmentsData` property.
+ * @returns function to preload the storage
  */
-export function loadData(preloadedData: SplitIO.PreloadedData, storage: { splits?: ISplitsCacheSync, segments: ISegmentsCacheSync, largeSegments?: ISegmentsCacheSync }, matchingKey?: string) {
-  // Do not load data if current preloadedData is empty
-  if (Object.keys(preloadedData).length === 0) return;
-
-  const { segmentsData = {}, since = -1, splitsData = [] } = preloadedData;
+export function dataLoaderFactory(preloadedData: SplitIO.PreloadedData): DataLoader {
+
+  /**
+   * Storage-agnostic adaptation of `loadDataIntoLocalStorage` function
+   * (https://github.com/godaddy/split-javascript-data-loader/blob/master/src/load-data.js)
+   *
+   * @param storage object containing `splits` and `segments` cache (client-side variant)
+   * @param userId user key string of the provided MySegmentsCache
+   *
+   * @TODO extend to support SegmentsCache (server-side variant) by making `userId` optional and adding the corresponding logic.
+   * @TODO extend to load data on shared mySegments storages. Be specific when emitting SDK_READY_FROM_CACHE on shared clients. Maybe the serializer should provide the `useSegments` flag.
+   */
+  return function loadData(storage: { splits: ISplitsCacheSync, segments: ISegmentsCacheSync }, userId: string) {
+    // Do not load data if current preloadedData is empty
+    if (Object.keys(preloadedData).length === 0) return;
+
+    const { lastUpdated = -1, segmentsData = {}, since = -1, splitsData = {} } = preloadedData;

-  if (storage.splits) {
     const storedSince = storage.splits.getChangeNumber();
+    const expirationTimestamp = Date.now() - DEFAULT_CACHE_EXPIRATION_IN_MILLIS;

-    // Do not load data if current data is more recent
-    if (storedSince > since) return;
+    // Do not load data if current localStorage data is more recent,
+    // or if its `lastUpdated` timestamp is older than the given `expirationTimestamp`,
+    if (storedSince > since || lastUpdated < expirationTimestamp) return;

     // cleaning up the localStorage data, since some cached splits might need be part of the preloaded data
     storage.splits.clear();
     storage.splits.setChangeNumber(since);

     // splitsData in an object where the property is the split name and the pertaining value is a stringified json of its data
-    storage.splits.addSplits(splitsData.map(split => ([split.name, split])));
-  }
+    storage.splits.addSplits(Object.keys(splitsData).map(splitName => JSON.parse(splitsData[splitName])));

-  if (matchingKey) { // add mySegments data (client-side)
-    let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[matchingKey];
+    // add mySegments data
+    let mySegmentsData = preloadedData.mySegmentsData && preloadedData.mySegmentsData[userId];
     if (!mySegmentsData) {
       // segmentsData in an object where the property is the segment name and the pertaining value is a stringified object that contains the `added` array of userIds
       mySegmentsData = Object.keys(segmentsData).filter(segmentName => {
-        const matchingKeys = segmentsData[segmentName];
-        return matchingKeys.indexOf(matchingKey) > -1;
+        const userIds = JSON.parse(segmentsData[segmentName]).added;
+        return Array.isArray(userIds) && userIds.indexOf(userId) > -1;
       });
     }
     storage.segments.resetSegments({ k: mySegmentsData.map(s => ({ n: s })) });
-  } else { // add segments data (server-side)
-    Object.keys(segmentsData).filter(segmentName => {
-      const matchingKeys = segmentsData[segmentName];
-      storage.segments.addToSegment(segmentName, matchingKeys);
-    });
-  }
-}
-
-export function getSnapshot(storage: IStorageSync, userKeys?: SplitIO.SplitKey[]): SplitIO.PreloadedData {
-  return {
-    // lastUpdated: Date.now(),
-    since: storage.splits.getChangeNumber(),
-    splitsData: storage.splits.getAll(),
-    segmentsData: userKeys ?
-      undefined : // @ts-ignore accessing private prop
-      Object.keys(storage.segments.segmentCache).reduce((prev, cur) => { // @ts-ignore accessing private prop
-        prev[cur] = setToArray(storage.segments.segmentCache[cur] as ISet);
-        return prev;
-      }, {}),
-    mySegmentsData: userKeys ?
-      userKeys.reduce>((prev, userKey) => {
-        prev[getMatching(userKey)] = storage.shared ?
-          // Client-side segments
-          // @ts-ignore accessing private prop
-          Object.keys(storage.shared(userKey).segments.segmentCache) :
-          // Server-side segments
-          // @ts-ignore accessing private prop
-          Object.keys(storage.segments.segmentCache).reduce((prev, segmentName) => { // @ts-ignore accessing private prop
-            return storage.segments.segmentCache[segmentName].has(userKey) ?
-              prev.concat(segmentName) :
-              prev;
-          }, []);
-        return prev;
-      }, {}) :
-      undefined
   };
 }

diff --git a/src/storages/inMemory/InMemoryStorageCS.ts b/src/storages/inMemory/InMemoryStorageCS.ts
index 670b91f1..30667369 100644
--- a/src/storages/inMemory/InMemoryStorageCS.ts
+++ b/src/storages/inMemory/InMemoryStorageCS.ts
@@ -7,8 +7,6 @@ import { ImpressionCountsCacheInMemory } from './ImpressionCountsCacheInMemory';
 import { DEBUG, LOCALHOST_MODE, NONE, STORAGE_MEMORY } from '../../utils/constants';
 import { shouldRecordTelemetry, TelemetryCacheInMemory } from './TelemetryCacheInMemory';
 import { UniqueKeysCacheInMemoryCS } from './UniqueKeysCacheInMemoryCS';
-import { getMatching } from '../../utils/key';
-import { loadData } from '../dataLoader';

 /**
  * InMemory storage factory for standalone client-side SplitFactory
  * @param params parameters required by EventsCacheSync
  */
 export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorageSync {
-  const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation }, preloadedData }, onReadyFromCacheCb } = params;
+  const { settings: { scheduler: { impressionsQueueSize, eventsQueueSize, }, sync: { impressionsMode, __splitFiltersValidation } } } = params;

   const splits = new SplitsCacheInMemory(__splitFiltersValidation);
   const segments = new MySegmentsCacheInMemory();
@@ -44,18 +42,11 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag
     },

     // When using shared instanciation with MEMORY we reuse everything but segments (they are unique per key)
-    shared(matchingKey: string) {
-      const segments = new MySegmentsCacheInMemory();
-      const largeSegments = new MySegmentsCacheInMemory();
-
-      if (preloadedData) {
-        loadData(preloadedData, { segments, largeSegments }, matchingKey);
-      }
-
+    shared() {
       return {
         splits: this.splits,
-        segments,
-        largeSegments,
+        segments: new MySegmentsCacheInMemory(),
+        largeSegments: new MySegmentsCacheInMemory(),
         impressions: this.impressions,
         impressionCounts: this.impressionCounts,
         events: this.events,
@@ -81,12 +72,6 @@ export function InMemoryStorageCSFactory(params: IStorageFactoryParams): IStorag
     if (storage.uniqueKeys) storage.uniqueKeys.track = noopTrack;
   }

-
-  if (preloadedData) {
-    loadData(preloadedData, storage, getMatching(params.settings.core.key));
-    if (splits.getChangeNumber() > -1) onReadyFromCacheCb();
-  }
-
   return storage;
 }

diff --git a/src/storages/types.ts b/src/storages/types.ts
index 21945587..61ab10f2 100644
--- a/src/storages/types.ts
+++ b/src/storages/types.ts
@@ -492,6 +492,8 @@ export interface IStorageAsync extends IStorageBase<

 /** StorageFactory */

+export type DataLoader = (storage: IStorageSync, matchingKey: string) => void
+
 export interface IStorageFactoryParams {
   settings: ISettings,
   /**

diff --git a/src/trackers/eventTracker.ts b/src/trackers/eventTracker.ts
index 18b1e94c..8efcf413 100644
--- a/src/trackers/eventTracker.ts
+++ b/src/trackers/eventTracker.ts
@@ -32,8 +32,8 @@ export function eventTrackerFactory(
       if (tracked) {
         log.info(EVENTS_TRACKER_SUCCESS, [msg]);
         if (integrationsManager) {
-          // Wrap in a timeout because we don't want it to be blocking.
           whenInit(() => {
+            // Wrap in a timeout because we don't want it to be blocking.
             setTimeout(() => {
               // copy of event, to avoid unexpected behaviour if modified by integrations
               const eventDataCopy = objectAssign({}, eventData);

diff --git a/src/trackers/impressionsTracker.ts b/src/trackers/impressionsTracker.ts
index d8a3fbc0..dcf998fc 100644
--- a/src/trackers/impressionsTracker.ts
+++ b/src/trackers/impressionsTracker.ts
@@ -67,8 +67,8 @@ export function impressionsTrackerFactory(
         sdkLanguageVersion: version
       };

-      // Wrap in a timeout because we don't want it to be blocking.
       whenInit(() => {
+        // Wrap in a timeout because we don't want it to be blocking.
         setTimeout(() => {
           // integrationsManager.handleImpression does not throw errors
           if (integrationsManager) integrationsManager.handleImpression(impressionData);

diff --git a/src/types.ts b/src/types.ts
index 777b3258..2a65b297 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,4 +1,4 @@
-import { ISplit, ISplitFiltersValidation } from './dtos/types';
+import { ISplitFiltersValidation } from './dtos/types';
 import { IIntegration, IIntegrationFactoryParams } from './integrations/types';
 import { ILogger } from './logger/types';
 import { ISdkFactoryContext } from './sdkFactory/types';
@@ -98,7 +98,6 @@ export interface ISettings {
     eventsFirstPushWindow: number
   },
   readonly storage: IStorageSyncFactory | IStorageAsyncFactory,
-  readonly preloadedData?: SplitIO.PreloadedData,
   readonly integrations: Array<{
     readonly type: string,
     (params: IIntegrationFactoryParams): IIntegration | void
@@ -772,20 +771,21 @@ export namespace SplitIO {
      * If this value is older than 10 days ago (expiration time policy), the data is not used to update the storage content.
      * @TODO configurable expiration time policy?
      */
-    // lastUpdated: number,
+    lastUpdated: number,
     /**
      * Change number of the preloaded data.
      * If this value is older than the current changeNumber at the storage, the data is not used to update the storage content.
      */
     since: number,
     /**
-     * List of feature flag definitions.
-     * @TODO rename to flags
+     * Map of feature flags to their stringified definitions.
      */
-    splitsData: ISplit[],
+    splitsData: {
+      [splitName: string]: string
+    },
     /**
      * Optional map of user keys to their list of segments.
-     * @TODO rename to memberships
+     * @TODO remove when releasing first version
      */
     mySegmentsData?: {
       [key: string]: string[]
     },
     /**
      * Optional map of segments to their stringified definitions.
      * This property is ignored if `mySegmentsData` was provided.
-     * @TODO rename to segments
      */
     segmentsData?: {
-      [segmentName: string]: string[]
+      [segmentName: string]: string
     },
   }
   /**
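
Note (not part of the patch): a minimal usage sketch of the preloaded-data shape this revert restores. The `PreloadedData` fields follow the reverted type definition in src/types.ts (stringified `splitsData`/`segmentsData` values), and `dataLoaderFactory`/`DataLoader` come from src/storages/dataLoader.ts and src/storages/types.ts. The concrete flag name, user key, import paths, and the wiring into the SDK factory are hypothetical, since the `params.dataLoader` hook in src/sdkFactory/index.ts is still marked as a @TODO in this patch.

  import { SplitIO } from './src/types'; // illustrative relative paths
  import { dataLoaderFactory } from './src/storages/dataLoader';
  import { DataLoader } from './src/storages/types';

  // Hypothetical snapshot following the reverted PreloadedData format:
  const preloadedData: SplitIO.PreloadedData = {
    lastUpdated: Date.now(), // snapshots older than the expiration policy are ignored
    since: 123, // ignored if the storage already has a newer changeNumber
    splitsData: {
      // values are stringified feature flag definitions (trimmed here for brevity)
      'my_feature_flag': '{"name":"my_feature_flag","status":"ACTIVE"}'
    },
    mySegmentsData: {
      // map of user keys to the segments they belong to
      'user-key-1': ['segment_a']
    }
  };

  // dataLoaderFactory returns a DataLoader that preloads a client-side storage with
  // the snapshot, e.g. loader(storage, 'user-key-1') for a given client-side storage.
  const loader: DataLoader = dataLoaderFactory(preloadedData);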