OpenAI
Defined in: packages/providers/openai/src/llm.ts:49
Extends
ToolCallLLM<OpenAIAdditionalChatOptions>
Constructors
Constructor
new OpenAI(init?): OpenAI
Defined in: packages/providers/openai/src/llm.ts:79
Parameters
init?
Omit<Partial<OpenAI>, "session"> & object
Returns
OpenAI
Overrides
ToolCallLLM<OpenAIAdditionalChatOptions>.constructor
Properties
model
model: ChatModel | string & object
Defined in: packages/providers/openai/src/llm.ts:50
temperature
temperature: number
Defined in: packages/providers/openai/src/llm.ts:54
reasoningEffort?
optional reasoningEffort: "high" | "low" | "medium"
Defined in: packages/providers/openai/src/llm.ts:55
topP
topP: number
Defined in: packages/providers/openai/src/llm.ts:56
maxTokens?
optional maxTokens: number
Defined in: packages/providers/openai/src/llm.ts:57
additionalChatOptions?
optional additionalChatOptions: OpenAIAdditionalChatOptions
Defined in: packages/providers/openai/src/llm.ts:58
apiKey?
optional apiKey: string = undefined
Defined in: packages/providers/openai/src/llm.ts:61
baseURL?
optional baseURL: string = undefined
Defined in: packages/providers/openai/src/llm.ts:62
maxRetries
maxRetries: number
Defined in: packages/providers/openai/src/llm.ts:63
timeout?
optional timeout: number
Defined in: packages/providers/openai/src/llm.ts:64
additionalSessionOptions?
optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in: packages/providers/openai/src/llm.ts:65
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: packages/providers/openai/src/llm.ts:70
Returns
Promise<LLMInstance>
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: packages/providers/openai/src/llm.ts:72
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/openai/src/llm.ts:114
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: packages/providers/openai/src/llm.ts:118
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
Methods
toOpenAIRole()
static toOpenAIRole(messageType): ChatCompletionRole
Defined in: packages/providers/openai/src/llm.ts:134
Parameters
messageType
MessageType
Returns
ChatCompletionRole
toOpenAIMessage()
static toOpenAIMessage(messages): ChatCompletionMessageParam[]
Defined in: packages/providers/openai/src/llm.ts:147
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: packages/providers/openai/src/llm.ts:238
Parameters
params
LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/llm.ts:244
Parameters
params
LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
streamChat()
protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/llm.ts:342
Parameters
baseRequestParams
ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toTool()
static toTool(tool): ChatCompletionTool
Defined in: packages/providers/openai/src/llm.ts:428
Parameters
tool
BaseTool
Returns
ChatCompletionTool