From bfee29843125c55be1b21e4f77c9d8c3c8698856 Mon Sep 17 00:00:00 2001
From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com>
Date: Tue, 10 Dec 2024 11:14:16 -0800
Subject: [PATCH 1/2] refactor!: Change versionKey to variationKey. (#709)

---
 .../__tests__/LDAIClientImpl.test.ts          |  4 +-
 .../__tests__/LDAIConfigTrackerImpl.test.ts   | 64 +++++++++----------
 packages/sdk/server-ai/src/LDAIClientImpl.ts  |  4 +-
 .../server-ai/src/LDAIConfigTrackerImpl.ts    |  6 +-
 4 files changed, 39 insertions(+), 39 deletions(-)

diff --git a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
index 7a4819633..41d035564 100644
--- a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
+++ b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
@@ -33,7 +33,7 @@ it('returns config with interpolated messagess', async () => {
       { role: 'user', content: 'Score: {{score}}' },
     ],
     _ldMeta: {
-      versionKey: 'v1',
+      variationKey: 'v1',
       enabled: true,
     },
   };
@@ -70,7 +70,7 @@ it('includes context in variables for messages interpolation', async () => {

   const mockVariation = {
     messages: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
-    _ldMeta: { versionKey: 'v1', enabled: true },
+    _ldMeta: { variationKey: 'v1', enabled: true },
   };

   mockLdClient.variation.mockResolvedValue(mockVariation);
diff --git a/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts
index 532002dba..baddd6dcd 100644
--- a/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts
+++ b/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts
@@ -13,26 +13,26 @@ const mockLdClient: LDClientMin = {

 const testContext: LDContext = { kind: 'user', key: 'test-user' };
 const configKey = 'test-config';
-const versionKey = 'v1';
+const variationKey = 'v1';

 beforeEach(() => {
   jest.clearAllMocks();
 });

 it('tracks duration', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   tracker.trackDuration(1000);

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:duration:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1000,
   );
 });

 it('tracks duration of async function', async () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

   const result = await tracker.trackDurationOf(async () => 'test-result');
@@ -41,49 +41,49 @@ it('tracks duration of async function', async () => {
   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:duration:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1000,
   );
 });

 it('tracks positive feedback', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   tracker.trackFeedback({ kind: LDFeedbackKind.Positive });

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:feedback:user:positive',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1,
   );
 });

 it('tracks negative feedback', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   tracker.trackFeedback({ kind: LDFeedbackKind.Negative });

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:feedback:user:negative',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1,
   );
 });

 it('tracks success', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   tracker.trackSuccess();

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:generation',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1,
   );
 });

 it('tracks OpenAI usage', async () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
   jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

   const TOTAL_TOKENS = 100;
@@ -101,41 +101,41 @@ it('tracks OpenAI usage', async () => {
   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:duration:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1000,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:generation',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     TOTAL_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:input',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     PROMPT_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:output',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     COMPLETION_TOKENS,
   );
 });

 it('tracks Bedrock conversation with successful response', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   const TOTAL_TOKENS = 100;
   const PROMPT_TOKENS = 49;
@@ -156,41 +156,41 @@ it('tracks Bedrock conversation with successful response', () => {
   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:generation',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     1,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:duration:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     500,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     TOTAL_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:input',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     PROMPT_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:output',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     COMPLETION_TOKENS,
   );
 });

 it('tracks Bedrock conversation with error response', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   const response = {
     $metadata: { httpStatusCode: 400 },
@@ -204,7 +204,7 @@ it('tracks Bedrock conversation with error response', () => {
 });

 it('tracks tokens', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   const TOTAL_TOKENS = 100;
   const PROMPT_TOKENS = 49;
@@ -219,27 +219,27 @@ it('tracks tokens', () => {
   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:total',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     TOTAL_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:input',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     PROMPT_TOKENS,
   );

   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:output',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     COMPLETION_TOKENS,
   );
 });

 it('only tracks non-zero token counts', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   tracker.trackTokens({
     total: 0,
@@ -257,7 +257,7 @@ it('only tracks non-zero token counts', () => {
   expect(mockTrack).toHaveBeenCalledWith(
     '$ld:ai:tokens:input',
     testContext,
-    { configKey, versionKey },
+    { configKey, variationKey },
     50,
   );
@@ -270,7 +270,7 @@ it('only tracks non-zero token counts', () => {
 });

 it('returns empty summary when no metrics tracked', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   const summary = tracker.getSummary();
@@ -278,7 +278,7 @@ it('returns empty summary when no metrics tracked', () => {
 });

 it('summarizes tracked metrics', () => {
-  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+  const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

   tracker.trackDuration(1000);
   tracker.trackTokens({
diff --git a/packages/sdk/server-ai/src/LDAIClientImpl.ts b/packages/sdk/server-ai/src/LDAIClientImpl.ts
index bbaae74e9..965c49ea7 100644
--- a/packages/sdk/server-ai/src/LDAIClientImpl.ts
+++ b/packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -11,7 +11,7 @@ import { LDClientMin } from './LDClientMin';
 * Metadata assorted with a model configuration variation.
 */
interface LDMeta {
-  versionKey: string;
+  variationKey: string;
   enabled: boolean;
 }

@@ -44,7 +44,7 @@ export class LDAIClientImpl implements LDAIClient {
       this._ldClient,
       key,
       // eslint-disable-next-line no-underscore-dangle
-      value._ldMeta?.versionKey ?? '',
+      value._ldMeta?.variationKey ?? '',
       context,
     );
     // eslint-disable-next-line no-underscore-dangle
diff --git a/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts b/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
index 2894f965e..17e49387b 100644
--- a/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
+++ b/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
@@ -12,13 +12,13 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
   constructor(
     private _ldClient: LDClientMin,
     private _configKey: string,
-    private _versionKey: string,
+    private _variationKey: string,
     private _context: LDContext,
   ) {}

-  private _getTrackData(): { versionKey: string; configKey: string } {
+  private _getTrackData(): { variationKey: string; configKey: string } {
     return {
-      versionKey: this._versionKey,
+      variationKey: this._variationKey,
       configKey: this._configKey,
     };
   }

From b91787d44de55ee7369469e9c0b97e89a7335804 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 10 Dec 2024 11:18:58 -0800
Subject: [PATCH 2/2] chore: release main (#710)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

:robot: I have created a release *beep* *boop*
---
server-sdk-ai: 0.6.0

## [0.6.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.5.0...server-sdk-ai-v0.6.0) (2024-12-10)

### ⚠ BREAKING CHANGES

* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709))

### Code Refactoring

* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709)) ([bfee298](https://github.com/launchdarkly/js-core/commit/bfee29843125c55be1b21e4f77c9d8c3c8698856))
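For code that reads the `_ldMeta` payload or the `$ld:ai:*` event data directly, here is a minimal before/after sketch of the rename; the field shapes come from the tests in #709, while the `'my-config'` key is purely illustrative:

```typescript
// 0.5.0 and earlier: the variation is identified by `versionKey`.
const metaBefore = { versionKey: 'v1', enabled: true };

// 0.6.0 and later: the same field is now named `variationKey`.
const metaAfter = { variationKey: 'v1', enabled: true };

// The tracker's event data payload (e.g. on '$ld:ai:duration:total' events)
// carries the same rename:
const trackDataBefore = { configKey: 'my-config', versionKey: 'v1' };
const trackDataAfter = { configKey: 'my-config', variationKey: 'v1' };
```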
---
This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 .release-please-manifest.json                        |  2 +-
 packages/sdk/server-ai/CHANGELOG.md                  | 11 +++++++++++
 packages/sdk/server-ai/examples/bedrock/package.json |  2 +-
 packages/sdk/server-ai/examples/openai/package.json  |  2 +-
 packages/sdk/server-ai/package.json                  |  2 +-
 5 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index f934ffba8..b6f350b60 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -14,5 +14,5 @@
   "packages/sdk/react-native": "10.9.3",
   "packages/telemetry/node-server-sdk-otel": "1.1.2",
   "packages/sdk/browser": "0.3.3",
-  "packages/sdk/server-ai": "0.5.0"
+  "packages/sdk/server-ai": "0.6.0"
 }
diff --git a/packages/sdk/server-ai/CHANGELOG.md b/packages/sdk/server-ai/CHANGELOG.md
index 642fc8326..a54ffddb5 100644
--- a/packages/sdk/server-ai/CHANGELOG.md
+++ b/packages/sdk/server-ai/CHANGELOG.md
@@ -1,5 +1,16 @@
 # Changelog

+## [0.6.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.5.0...server-sdk-ai-v0.6.0) (2024-12-10)
+
+
+### ⚠ BREAKING CHANGES
+
+* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709))
+
+### Code Refactoring
+
+* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709)) ([bfee298](https://github.com/launchdarkly/js-core/commit/bfee29843125c55be1b21e4f77c9d8c3c8698856))
+
 ## [0.5.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.4.0...server-sdk-ai-v0.5.0) (2024-12-09)

diff --git a/packages/sdk/server-ai/examples/bedrock/package.json b/packages/sdk/server-ai/examples/bedrock/package.json
index 80b6345c1..18699f6c6 100644
--- a/packages/sdk/server-ai/examples/bedrock/package.json
+++ b/packages/sdk/server-ai/examples/bedrock/package.json
@@ -24,7 +24,7 @@
   "dependencies": {
     "@aws-sdk/client-bedrock-runtime": "^3.679.0",
     "@launchdarkly/node-server-sdk": "^9.7.1",
-    "@launchdarkly/server-sdk-ai": "0.5.0"
+    "@launchdarkly/server-sdk-ai": "0.6.0"
   },
   "devDependencies": {
     "@trivago/prettier-plugin-sort-imports": "^4.1.1",
diff --git a/packages/sdk/server-ai/examples/openai/package.json b/packages/sdk/server-ai/examples/openai/package.json
index c7bd4834f..6713129b9 100644
--- a/packages/sdk/server-ai/examples/openai/package.json
+++ b/packages/sdk/server-ai/examples/openai/package.json
@@ -22,7 +22,7 @@
   "license": "Apache-2.0",
   "dependencies": {
     "@launchdarkly/node-server-sdk": "^9.7.1",
-    "@launchdarkly/server-sdk-ai": "0.5.0",
+    "@launchdarkly/server-sdk-ai": "0.6.0",
     "openai": "^4.58.1"
   },
   "devDependencies": {
diff --git a/packages/sdk/server-ai/package.json b/packages/sdk/server-ai/package.json
index 0c1383c7e..c614c36b1 100644
--- a/packages/sdk/server-ai/package.json
+++ b/packages/sdk/server-ai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@launchdarkly/server-sdk-ai",
-  "version": "0.5.0",
+  "version": "0.6.0",
   "description": "LaunchDarkly AI SDK for Server-Side JavaScript",
   "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-ai",
   "repository": {