Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into ta/sdk-849/fdv2-strea…
Browse files Browse the repository at this point in the history
…ming-datasource
  • Loading branch information
tanderson-ld committed Dec 10, 2024
2 parents 05839b3 + 7720ac9 commit 730b2d6
Show file tree
Hide file tree
Showing 9 changed files with 37 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,5 @@
"packages/sdk/react-native": "10.9.3",
"packages/telemetry/node-server-sdk-otel": "1.1.2",
"packages/sdk/browser": "0.3.3",
"packages/sdk/server-ai": "0.4.0"
"packages/sdk/server-ai": "0.5.0"
}
16 changes: 16 additions & 0 deletions packages/sdk/server-ai/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,21 @@
# Changelog

## [0.5.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.4.0...server-sdk-ai-v0.5.0) (2024-12-09)


### ⚠ BREAKING CHANGES

* Rename model and providerid to name. ([#706](https://github.com/launchdarkly/js-core/issues/706))

### Features

* Add getSummary method to the tracker. ([#698](https://github.com/launchdarkly/js-core/issues/698)) ([4df902d](https://github.com/launchdarkly/js-core/commit/4df902d98584c88b072d6dab5f32a6ea8c4fcdf5))


### Code Refactoring

* Rename model and providerid to name. ([#706](https://github.com/launchdarkly/js-core/issues/706)) ([8dd3951](https://github.com/launchdarkly/js-core/commit/8dd39517cfc14c6e037a2438d22f20a9527c9ffa))

## [0.4.0](https://github.com/launchdarkly/js-core/compare/server-sdk-ai-v0.3.0...server-sdk-ai-v0.4.0) (2024-11-22)


Expand Down
22 changes: 11 additions & 11 deletions packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,18 @@ it('returns config with interpolated messagess', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
enabled: true,
};

const mockVariation = {
model: {
id: 'example-model',
name: 'example-model',
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
},
provider: {
id: 'example-provider',
name: 'example-provider',
},
messages: [
{ role: 'system', content: 'Hello {{name}}' },
Expand All @@ -45,11 +45,11 @@ it('returns config with interpolated messagess', async () => {

expect(result).toEqual({
model: {
id: 'example-model',
name: 'example-model',
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
},
provider: {
id: 'example-provider',
name: 'example-provider',
},
messages: [
{ role: 'system', content: 'Hello John' },
Expand All @@ -64,7 +64,7 @@ it('includes context in variables for messages interpolation', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
};

Expand All @@ -84,12 +84,12 @@ it('handles missing metadata in variation', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
};

const mockVariation = {
model: { id: 'example-provider', parameters: { name: 'imagination' } },
model: { name: 'example-provider', parameters: { name: 'imagination' } },
messages: [{ role: 'system', content: 'Hello' }],
};

Expand All @@ -98,7 +98,7 @@ it('handles missing metadata in variation', async () => {
const result = await client.config(key, testContext, defaultValue);

expect(result).toEqual({
model: { id: 'example-provider', parameters: { name: 'imagination' } },
model: { name: 'example-provider', parameters: { name: 'imagination' } },
messages: [{ role: 'system', content: 'Hello' }],
tracker: expect.any(Object),
enabled: false,
Expand All @@ -109,8 +109,8 @@ it('passes the default value to the underlying client', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'non-existent-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'default-model', parameters: { name: 'default' } },
provider: { id: 'default-provider' },
model: { name: 'default-model', parameters: { name: 'default' } },
provider: { name: 'default-provider' },
messages: [{ role: 'system', content: 'Default messages' }],
enabled: true,
};
Expand Down
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/bedrock/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"dependencies": {
"@aws-sdk/client-bedrock-runtime": "^3.679.0",
"@launchdarkly/node-server-sdk": "^9.7.1",
"@launchdarkly/server-sdk-ai": "0.4.0"
"@launchdarkly/server-sdk-ai": "0.5.0"
},
"devDependencies": {
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
Expand Down
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/examples/bedrock/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async function main() {
context,
{
model: {
id: 'my-default-model',
name: 'my-default-model',
},
enabled: true,
},
Expand All @@ -66,7 +66,7 @@ async function main() {
const completion = tracker.trackBedrockConverseMetrics(
await awsClient.send(
new ConverseCommand({
modelId: aiConfig.model?.id ?? 'no-model',
modelId: aiConfig.model?.name ?? 'no-model',
messages: mapPromptToConversation(aiConfig.messages ?? []),
inferenceConfig: {
temperature: (aiConfig.model?.parameters?.temperature as number) ?? 0.5,
Expand Down
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
"license": "Apache-2.0",
"dependencies": {
"@launchdarkly/node-server-sdk": "^9.7.1",
"@launchdarkly/server-sdk-ai": "0.4.0",
"@launchdarkly/server-sdk-ai": "0.5.0",
"openai": "^4.58.1"
},
"devDependencies": {
Expand Down
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/examples/openai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ async function main(): Promise<void> {
context,
{
model: {
id: 'gpt-4',
name: 'gpt-4',
},
},
{ myVariable: 'My User Defined Variable' },
Expand All @@ -61,7 +61,7 @@ async function main(): Promise<void> {
const completion = await tracker.trackOpenAIMetrics(async () =>
client.chat.completions.create({
messages: aiConfig.messages || [],
model: aiConfig.model?.id || 'gpt-4',
model: aiConfig.model?.name || 'gpt-4',
temperature: (aiConfig.model?.parameters?.temperature as number) ?? 0.5,
max_tokens: (aiConfig.model?.parameters?.maxTokens as number) ?? 4096,
}),
Expand Down
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@launchdarkly/server-sdk-ai",
"version": "0.4.0",
"version": "0.5.0",
"description": "LaunchDarkly AI SDK for Server-Side JavaScript",
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-ai",
"repository": {
Expand Down
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/src/api/config/LDAIConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ export interface LDModelConfig {
/**
 * The name of the model.
*/
id: string;
name: string;

/**
* Model specific parameters.
Expand All @@ -24,7 +24,7 @@ export interface LDProviderConfig {
/**
 * The name of the provider.
*/
id: string;
name: string;
}

/**
Expand Down

0 comments on commit 730b2d6

Please sign in to comment.