Merge branch 'main' into rlamb/SDK-920/session-replay
kinyoklion authored Dec 11, 2024
2 parents: 6f0df38 + b91787d · commit: caf23bd
Showing 9 changed files with 54 additions and 43 deletions.
2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -14,5 +14,5 @@
  "packages/sdk/react-native": "10.9.3",
  "packages/telemetry/node-server-sdk-otel": "1.1.2",
  "packages/sdk/browser": "0.3.3",
- "packages/sdk/server-ai": "0.5.0"
+ "packages/sdk/server-ai": "0.6.0"
}
11 changes: 11 additions & 0 deletions packages/sdk/server-ai/CHANGELOG.md
@@ -1,5 +1,16 @@
# Changelog

## [0.6.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.5.0...server-sdk-ai-v0.6.0) (2024-12-10)


### ⚠ BREAKING CHANGES

* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709))

### Code Refactoring

* Change versionKey to variationKey. ([#709](https://github.com/launchdarkly/js-core/issues/709)) ([bfee298](https://github.com/launchdarkly/js-core/commit/bfee29843125c55be1b21e4f77c9d8c3c8698856))

## [0.5.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.4.0...server-sdk-ai-v0.5.0) (2024-12-09)


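The breaking change above renames versionKey to variationKey, both in the _ldMeta block of an AI config variation and in the data attached to tracked events. A minimal migration sketch for application code that inspects that event data; the type alias and helper below are illustrative, not part of the SDK:

// 0.5.x event data:  { configKey: 'my-config', versionKey: 'v1' }
// 0.6.0 event data:  { configKey: 'my-config', variationKey: 'v1' }

// Hypothetical consumer code reading the tracked event data:
type AITrackData = { configKey: string; variationKey: string };

function describeTrackData(data: AITrackData): string {
  // previously `data.versionKey`
  return `${data.configKey} (${data.variationKey})`;
}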
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
@@ -33,7 +33,7 @@ it('returns config with interpolated messagess', async () => {
{ role: 'user', content: 'Score: {{score}}' },
],
_ldMeta: {
- versionKey: 'v1',
+ variationKey: 'v1',
enabled: true,
},
};
@@ -70,7 +70,7 @@ it('includes context in variables for messages interpolation', async () => {

const mockVariation = {
messages: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
- _ldMeta: { versionKey: 'v1', enabled: true },
+ _ldMeta: { variationKey: 'v1', enabled: true },
};

mockLdClient.variation.mockResolvedValue(mockVariation);
64 changes: 32 additions & 32 deletions packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts
@@ -13,26 +13,26 @@ const mockLdClient: LDClientMin = {

const testContext: LDContext = { kind: 'user', key: 'test-user' };
const configKey = 'test-config';
- const versionKey = 'v1';
+ const variationKey = 'v1';

beforeEach(() => {
jest.clearAllMocks();
});

it('tracks duration', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackDuration(1000);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);
});

it('tracks duration of async function', async () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

const result = await tracker.trackDurationOf(async () => 'test-result');
@@ -41,49 +41,49 @@ it('tracks duration of async function', async () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);
});

it('tracks positive feedback', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackFeedback({ kind: LDFeedbackKind.Positive });

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:feedback:user:positive',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks negative feedback', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackFeedback({ kind: LDFeedbackKind.Negative });

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:feedback:user:negative',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks success', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
tracker.trackSuccess();

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);
});

it('tracks OpenAI usage', async () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);
jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000);

const TOTAL_TOKENS = 100;
@@ -101,41 +101,41 @@ it('tracks OpenAI usage', async () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1000,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('tracks Bedrock conversation with successful response', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const TOTAL_TOKENS = 100;
const PROMPT_TOKENS = 49;
@@ -156,41 +156,41 @@ it('tracks Bedrock conversation with successful response', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:generation',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
1,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:duration:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
500,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('tracks Bedrock conversation with error response', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const response = {
$metadata: { httpStatusCode: 400 },
@@ -204,7 +204,7 @@ it('tracks Bedrock conversation with error response', () => {
});

it('tracks tokens', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const TOTAL_TOKENS = 100;
const PROMPT_TOKENS = 49;
@@ -219,27 +219,27 @@ it('tracks tokens', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:total',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
TOTAL_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
PROMPT_TOKENS,
);

expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:output',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
COMPLETION_TOKENS,
);
});

it('only tracks non-zero token counts', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

tracker.trackTokens({
total: 0,
@@ -257,7 +257,7 @@ it('only tracks non-zero token counts', () => {
expect(mockTrack).toHaveBeenCalledWith(
'$ld:ai:tokens:input',
testContext,
- { configKey, versionKey },
+ { configKey, variationKey },
50,
);

@@ -270,15 +270,15 @@
});

it('returns empty summary when no metrics tracked', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

const summary = tracker.getSummary();

expect(summary).toEqual({});
});

it('summarizes tracked metrics', () => {
- const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, versionKey, testContext);
+ const tracker = new LDAIConfigTrackerImpl(mockLdClient, configKey, variationKey, testContext);

tracker.trackDuration(1000);
tracker.trackTokens({
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/bedrock/package.json
@@ -24,7 +24,7 @@
  "dependencies": {
    "@aws-sdk/client-bedrock-runtime": "^3.679.0",
    "@launchdarkly/node-server-sdk": "^9.7.1",
-   "@launchdarkly/server-sdk-ai": "0.5.0"
+   "@launchdarkly/server-sdk-ai": "0.6.0"
},
"devDependencies": {
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/openai/package.json
@@ -22,7 +22,7 @@
  "license": "Apache-2.0",
  "dependencies": {
    "@launchdarkly/node-server-sdk": "^9.7.1",
-   "@launchdarkly/server-sdk-ai": "0.5.0",
+   "@launchdarkly/server-sdk-ai": "0.6.0",
"openai": "^4.58.1"
},
"devDependencies": {
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/package.json
@@ -1,6 +1,6 @@
{
  "name": "@launchdarkly/server-sdk-ai",
- "version": "0.5.0",
+ "version": "0.6.0",
"description": "LaunchDarkly AI SDK for Server-Side JavaScript",
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-ai",
"repository": {
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -11,7 +11,7 @@ import { LDClientMin } from './LDClientMin';
* Metadata assorted with a model configuration variation.
*/
interface LDMeta {
- versionKey: string;
+ variationKey: string;
enabled: boolean;
}

@@ -44,7 +44,7 @@ export class LDAIClientImpl implements LDAIClient {
this._ldClient,
key,
// eslint-disable-next-line no-underscore-dangle
- value._ldMeta?.versionKey ?? '',
+ value._ldMeta?.variationKey ?? '',
context,
);
// eslint-disable-next-line no-underscore-dangle
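For reference, the _ldMeta block is part of the AI config variation served to the SDK; after this change the client reads variationKey from it and falls back to an empty string when it is absent. A variation in the shape used by the tests in this commit (values illustrative):

const exampleVariation = {
  messages: [{ role: 'system', content: 'User key: {{ldctx.key}}' }],
  _ldMeta: { variationKey: 'v1', enabled: true },
};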
6 changes: 3 additions & 3 deletions packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
@@ -12,13 +12,13 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
constructor(
private _ldClient: LDClientMin,
private _configKey: string,
- private _versionKey: string,
+ private _variationKey: string,
private _context: LDContext,
) {}

- private _getTrackData(): { versionKey: string; configKey: string } {
+ private _getTrackData(): { variationKey: string; configKey: string } {
return {
- versionKey: this._versionKey,
+ variationKey: this._variationKey,
configKey: this._configKey,
};
}
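With this rename, every metric the tracker emits carries { configKey, variationKey } as its event data. A sketch of the underlying call for a duration metric, mirroring the expectations in the updated tests (context and values are illustrative):

// tracker.trackDuration(1000) results in a call equivalent to:
ldClient.track(
  '$ld:ai:duration:total',                          // metric key
  { kind: 'user', key: 'test-user' },               // evaluation context
  { configKey: 'test-config', variationKey: 'v1' }, // event data from _getTrackData()
  1000,                                             // metric value (duration in ms)
);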
