
Commit b10503d

brianyin/ajs-320-bump-openai-to-6.x (#813)
1 parent 8bf6234 commit b10503d

13 files changed: +82 −158 lines

.changeset/grumpy-parks-stand.md

Lines changed: 8 additions & 0 deletions

@@ -0,0 +1,8 @@
+---
+'@livekit/agents-plugin-google': patch
+'@livekit/agents-plugin-openai': patch
+'@livekit/agents-plugins-test': patch
+'@livekit/agents': patch
+---
+
+bump openai to 6.x

agents/package.json

Lines changed: 2 additions & 2 deletions

@@ -56,13 +56,13 @@
     "heap-js": "^2.6.0",
     "json-schema": "^0.4.0",
     "livekit-server-sdk": "^2.13.3",
-    "openai": "^4.91.1",
+    "openai": "^6.8.1",
     "pidusage": "^4.0.1",
     "pino": "^8.19.0",
     "pino-pretty": "^11.0.0",
     "sharp": "0.34.3",
     "uuid": "^11.1.0",
-    "ws": "^8.16.0",
+    "ws": "^8.18.0",
     "zod-to-json-schema": "^3.24.6"
   },
   "peerDependencies": {

agents/src/inference/llm.ts

Lines changed: 17 additions & 11 deletions

@@ -7,6 +7,7 @@ import {
   APIStatusError,
   APITimeoutError,
   DEFAULT_API_CONNECT_OPTIONS,
+  type Expand,
   toError,
 } from '../index.js';
 import * as llm from '../llm/index.js';

@@ -34,9 +35,10 @@ export type KimiModels = 'moonshotai/kimi-k2-instruct';

 export type DeepSeekModels = 'deepseek-ai/deepseek-v3';

-type ChatCompletionPredictionContentParam = OpenAI.Chat.Completions.ChatCompletionPredictionContent;
-type WebSearchOptions = OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions;
-type ToolChoice = OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice'];
+type ChatCompletionPredictionContentParam =
+  Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;
+type WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;
+type ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;
 type Verbosity = 'low' | 'medium' | 'high';

 export interface ChatCompletionOptions extends Record<string, unknown> {

@@ -180,9 +182,13 @@ export class LLM extends llm.LLM {
       modelOptions.parallel_tool_calls = parallelToolCalls;
     }

-    toolChoice = toolChoice !== undefined ? toolChoice : this.opts.modelOptions.tool_choice;
+    toolChoice =
+      toolChoice !== undefined
+        ? toolChoice
+        : (this.opts.modelOptions.tool_choice as llm.ToolChoice | undefined);
+
     if (toolChoice) {
-      modelOptions.tool_choice = toolChoice;
+      modelOptions.tool_choice = toolChoice as ToolChoice;
     }

     // TODO(AJS-270): Add response_format support here

@@ -238,7 +244,7 @@ export class LLMStream extends llm.LLMStream {
       toolCtx?: llm.ToolContext;
       gatewayOptions?: GatewayOptions;
       connOptions: APIConnectOptions;
-      modelOptions: Record<string, any>;
+      modelOptions: Record<string, unknown>;
       providerFmt?: llm.ProviderFormat;
     },
   ) {

@@ -270,7 +276,7 @@
             description: func.description,
             parameters: llm.toJsonSchema(
               func.parameters,
-            ) as unknown as OpenAI.Chat.Completions.ChatCompletionTool['function']['parameters'],
+            ) as unknown as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function']['parameters'],
           },
         }))
       : undefined;

@@ -345,7 +351,7 @@
         options: {
           statusCode: error.status,
           body: error.error,
-          requestId: error.request_id,
+          requestId: error.requestID,
           retryable,
         },
       });

@@ -387,10 +393,10 @@
  *
  * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)
  * [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]
- * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='{"location": "P', name=None), type=None)]
- * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris}', name=None), type=None)]
+ * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\{"location": "P', name=None), type=None)]
+ * [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\}', name=None), type=None)]
  * [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]
- * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='{"location": "T', name=None), type=None)]
+ * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\{"location": "T', name=None), type=None)]
  * [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]
  * Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)
  */
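Two of the changes above track renames in the openai 6.x type surface: the function-style tool definition type is now OpenAI.Chat.Completions.ChatCompletionFunctionTool (previously ChatCompletionTool), and the request ID on API errors is read as error.requestID rather than error.request_id. As a rough sketch of what a tool array entry looks like against the 6.x types (the get_weather schema below is illustrative only, not taken from this repo; in the plugin the parameters object comes from llm.toJsonSchema):

import OpenAI from 'openai';

// Illustrative only: a function tool typed against the renamed 6.x interface.
const getWeatherTool: OpenAI.Chat.Completions.ChatCompletionFunctionTool = {
  type: 'function',
  function: {
    name: 'get_weather',
    description: 'Look up the current weather for a location',
    parameters: {
      type: 'object',
      properties: { location: { type: 'string' } },
      required: ['location'],
    },
  },
};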

agents/src/llm/llm.ts

Lines changed: 1 addition & 1 deletion

@@ -78,7 +78,7 @@ export abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCal
     connOptions?: APIConnectOptions;
     parallelToolCalls?: boolean;
     toolChoice?: ToolChoice;
-    extraKwargs?: Record<string, any>;
+    extraKwargs?: Record<string, unknown>;
   }): LLMStream;

   /**
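The switch from Record<string, any> to Record<string, unknown> here (and in the files below) only tightens type checking: values read back out must now be narrowed or cast before use, which is why the tool_choice lookup in agents/src/inference/llm.ts gained explicit casts. A minimal sketch of the difference, using a hypothetical option name:

// With Record<string, any> this assignment compiled silently;
// with Record<string, unknown> it is a compile error until narrowed or cast.
const extraKwargs: Record<string, unknown> = { reasoning_effort: 'low' };

// const effort: string = extraKwargs.reasoning_effort;            // error: 'unknown' is not assignable to 'string'
const effort = extraKwargs.reasoning_effort as string | undefined;  // explicit cast, mirroring the tool_choice handling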

agents/src/llm/provider_format/google.ts

Lines changed: 4 additions & 4 deletions

@@ -12,11 +12,11 @@ export interface GoogleFormatData {
 export async function toChatCtx(
   chatCtx: ChatContext,
   injectDummyUserMessage: boolean = true,
-): Promise<[Record<string, any>[], GoogleFormatData]> {
-  const turns: Record<string, any>[] = [];
+): Promise<[Record<string, unknown>[], GoogleFormatData]> {
+  const turns: Record<string, unknown>[] = [];
   const systemMessages: string[] = [];
   let currentRole: string | null = null;
-  let parts: Record<string, any>[] = [];
+  let parts: Record<string, unknown>[] = [];

   // Flatten all grouped tool calls to get individual messages
   const itemGroups = groupToolCalls(chatCtx);

@@ -104,7 +104,7 @@ export async function toChatCtx(
   ];
 }

-async function toImagePart(image: ImageContent): Promise<Record<string, any>> {
+async function toImagePart(image: ImageContent): Promise<Record<string, unknown>> {
   const cacheKey = 'serialized_image';
   if (!image._cache[cacheKey]) {
     image._cache[cacheKey] = await serializeImage(image);

agents/src/utils.ts

Lines changed: 17 additions & 0 deletions

@@ -15,6 +15,23 @@ import { TransformStream, type TransformStreamDefaultController } from 'node:str
 import { v4 as uuidv4 } from 'uuid';
 import { log } from './log.js';

+/**
+ * Recursively expands all nested properties of a type,
+ * resolving aliases so as to inspect the real shape in IDE.
+ */
+// eslint-disable-next-line @typescript-eslint/ban-types
+export type Expand<T> = T extends Function
+  ? T
+  : T extends object
+    ? T extends Array<infer U>
+      ? Array<Expand<U>>
+      : T extends Map<infer K, infer V>
+        ? Map<Expand<K>, Expand<V>>
+        : T extends Set<infer M>
+          ? Set<Expand<M>>
+          : { [K in keyof T]: Expand<T[K]> }
+    : T;
+
 /** Union of a single and a list of {@link AudioFrame}s */
 export type AudioBuffer = AudioFrame[] | AudioFrame;
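Expand<T> is a developer-experience helper rather than a behavioral change: wrapping an imported alias (as agents/src/inference/llm.ts now does for the OpenAI option types) forces TypeScript to resolve it into a concrete object shape, so IDE hovers show the actual fields instead of a bare alias name. A minimal usage sketch with made-up types (the import path assumes the package-index re-export implied by the '../index.js' import elsewhere in this diff):

import { type Expand } from '@livekit/agents';

// Hypothetical aliases, purely for illustration.
interface Address { city: string; zip: string }
interface User { name: string; address: Address; tags: Set<string> }

// Hovering `ResolvedUser` in an editor shows the fully expanded object literal,
// with the nested `Address` fields inlined, rather than just the name `User`.
type ResolvedUser = Expand<User>;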

examples/package.json

Lines changed: 1 addition & 1 deletion

@@ -37,7 +37,7 @@
     "@livekit/noise-cancellation-node": "^0.1.9",
     "@livekit/rtc-node": "^0.13.11",
     "livekit-server-sdk": "^2.13.3",
-    "ws": "^8.16.0"
+    "ws": "^8.18.0"
   },
   "peerDependencies": {
     "zod": "^3.25.76 || ^4.1.8"

plugins/google/src/llm.test.ts

Lines changed: 2 additions & 1 deletion

@@ -5,11 +5,12 @@ import { llm } from '@livekit/agents-plugins-test';
 import { describe } from 'vitest';
 import { LLM } from './llm.js';

-describe.skip('Google', async () => {
+describe('Google', async () => {
   await llm(
     new LLM({
       model: 'gemini-2.5-flash',
       temperature: 0,
     }),
+    true,
   );
 });

plugins/openai/package.json

Lines changed: 2 additions & 2 deletions

@@ -45,8 +45,8 @@
   },
   "dependencies": {
     "@livekit/mutex": "^1.1.1",
-    "openai": "^4.91.1",
-    "ws": "^8.16.0"
+    "openai": "^6.8.1",
+    "ws": "^8.18.0"
   },
   "peerDependencies": {
     "@livekit/agents": "workspace:*",

plugins/openai/src/llm.test.ts

Lines changed: 1 addition & 0 deletions

@@ -10,5 +10,6 @@ describe('OpenAI', async () => {
     new LLM({
       temperature: 0,
     }),
+    false,
   );
 });
