Skip to content

Commit

Permalink
refactor!: Rename model and provider id to name.
Browse files Browse the repository at this point in the history
  • Loading branch information
kinyoklion committed Dec 9, 2024
1 parent 686c1f3 commit 173f1d5
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 15 deletions.
22 changes: 11 additions & 11 deletions packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,18 @@ it('returns config with interpolated messagess', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
enabled: true,
};

const mockVariation = {
model: {
id: 'example-model',
name: 'example-model',
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
},
provider: {
id: 'example-provider',
name: 'example-provider',
},
messages: [
{ role: 'system', content: 'Hello {{name}}' },
Expand All @@ -45,11 +45,11 @@ it('returns config with interpolated messagess', async () => {

expect(result).toEqual({
model: {
id: 'example-model',
name: 'example-model',
parameters: { name: 'imagination', temperature: 0.7, maxTokens: 4096 },
},
provider: {
id: 'example-provider',
name: 'example-provider',
},
messages: [
{ role: 'system', content: 'Hello John' },
Expand All @@ -64,7 +64,7 @@ it('includes context in variables for messages interpolation', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
};

Expand All @@ -84,12 +84,12 @@ it('handles missing metadata in variation', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'test-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'test', parameters: { name: 'test-model' } },
model: { name: 'test', parameters: { name: 'test-model' } },
messages: [],
};

const mockVariation = {
model: { id: 'example-provider', parameters: { name: 'imagination' } },
model: { name: 'example-provider', parameters: { name: 'imagination' } },
messages: [{ role: 'system', content: 'Hello' }],
};

Expand All @@ -98,7 +98,7 @@ it('handles missing metadata in variation', async () => {
const result = await client.config(key, testContext, defaultValue);

expect(result).toEqual({
model: { id: 'example-provider', parameters: { name: 'imagination' } },
model: { name: 'example-provider', parameters: { name: 'imagination' } },
messages: [{ role: 'system', content: 'Hello' }],
tracker: expect.any(Object),
enabled: false,
Expand All @@ -109,8 +109,8 @@ it('passes the default value to the underlying client', async () => {
const client = new LDAIClientImpl(mockLdClient);
const key = 'non-existent-flag';
const defaultValue: LDAIDefaults = {
model: { id: 'default-model', parameters: { name: 'default' } },
provider: { id: 'default-provider' },
model: { name: 'default-model', parameters: { name: 'default' } },
provider: { name: 'default-provider' },
messages: [{ role: 'system', content: 'Default messages' }],
enabled: true,
};
Expand Down
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/bedrock/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async function main() {
context,
{
model: {
id: 'my-default-model',
name: 'my-default-model',
},
enabled: true,
},
Expand Down
2 changes: 1 addition & 1 deletion packages/sdk/server-ai/examples/openai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ async function main(): Promise<void> {
const completion = await tracker.trackOpenAIMetrics(async () =>
client.chat.completions.create({
messages: aiConfig.messages || [],
model: aiConfig.model?.id || 'gpt-4',
model: aiConfig.model?.name || 'gpt-4',
temperature: (aiConfig.model?.parameters?.temperature as number) ?? 0.5,
max_tokens: (aiConfig.model?.parameters?.maxTokens as number) ?? 4096,
}),
Expand Down
4 changes: 2 additions & 2 deletions packages/sdk/server-ai/src/api/config/LDAIConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ export interface LDModelConfig {
/**
* The ID of the model.
*/
id: string;
name: string;

/**
* Model specific parameters.
Expand All @@ -24,7 +24,7 @@ export interface LDProviderConfig {
/**
* The ID of the provider.
*/
id: string;
name: string;
}

/**
Expand Down

0 comments on commit 173f1d5

Please sign in to comment.