ToolCallLLM

Defined in: packages/core/src/llms/base.ts:251

Unified language model interface; abstract base class for LLMs that support tool calling.

Extends

  • BaseLLM<AdditionalChatOptions, AdditionalMessageOptions>

Type Parameters

AdditionalChatOptions

AdditionalChatOptions extends object = object

AdditionalMessageOptions

AdditionalMessageOptions extends ToolCallLLMMessageOptions = ToolCallLLMMessageOptions

Constructors

Constructor

new ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>(): ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>

Returns

ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>

Inherited from

BaseLLM.constructor

Properties

metadata

abstract metadata: LLMMetadata

Defined in: packages/core/src/llms/base.ts:27

Inherited from

BaseLLM.metadata
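A typical use of metadata is checking the model name or sizing prompts against the context window. A minimal sketch, assuming model and contextWindow are among the LLMMetadata fields, with llm standing in for any concrete instance:

```ts
import type { ToolCallLLM } from "@llamaindex/core/llms";

declare const llm: ToolCallLLM; // any concrete implementation

// `model` and `contextWindow` are typical LLMMetadata fields; check the type
// for the full shape (temperature, topP, etc.).
const { model, contextWindow } = llm.metadata;
console.log(`Model ${model} accepts up to ${contextWindow} tokens of context.`);
```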


supportToolCall

abstract supportToolCall: boolean

Defined in: packages/core/src/llms/base.ts:256
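When this flag is false, tool definitions should not be forwarded to chat. A consumer-side sketch, assuming chat() accepts a tools parameter of BaseTool[]; the helper name and the question are illustrative only:

```ts
import type { BaseTool, ToolCallLLM } from "@llamaindex/core/llms";

// `tools` is a BaseTool[] built elsewhere; only forward it when the model
// actually supports tool calling.
async function askWithOptionalTools(llm: ToolCallLLM, tools: BaseTool[]) {
  const response = await llm.chat({
    messages: [{ role: "user", content: "What is the weather in Paris?" }],
    ...(llm.supportToolCall ? { tools } : {}),
  });
  return response.message.content;
}
```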

Methods

complete()

Call Signature

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Defined in: packages/core/src/llms/base.ts:29

Get a prompt completion from the LLM

Parameters
params

LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

BaseLLM.complete

Call Signature

complete(params): Promise<CompletionResponse>

Defined in: packages/core/src/llms/base.ts:32

Parameters
params

LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete
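A consumer-side sketch of both call signatures; the prompt text is illustrative and llm stands for any concrete implementation:

```ts
import type { ToolCallLLM } from "@llamaindex/core/llms";

declare const llm: ToolCallLLM; // any concrete implementation

// Non-streaming: resolves to a single CompletionResponse.
const completion = await llm.complete({ prompt: "Write a haiku about the sea." });
console.log(completion.text);

// Streaming: resolves to an AsyncIterable of partial CompletionResponse values,
// where `text` carries the text produced for each chunk.
const stream = await llm.complete({
  prompt: "Write a haiku about the sea.",
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.text);
}
```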


chat()

Call Signature

abstract chat(params): Promise<AsyncIterable<ChatResponseChunk<AdditionalMessageOptions>, any, any>>

Defined in: packages/core/src/llms/base.ts:64

Get a chat response from the LLM

Parameters
params

LLMChatParamsStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<AdditionalMessageOptions>, any, any>>

Inherited from

BaseLLM.chat

Call Signature

abstract chat(params): Promise<ChatResponse<AdditionalMessageOptions>>

Defined in: packages/core/src/llms/base.ts:70

Parameters
params

LLMChatParamsNonStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<ChatResponse<AdditionalMessageOptions>>

Inherited from

BaseLLM.chat
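A consumer-side sketch of both call signatures; tools is a hypothetical BaseTool[], and any tool calls requested by the assistant are expected on message.options, which is typed by AdditionalMessageOptions (here ToolCallLLMMessageOptions):

```ts
import type { BaseTool, ToolCallLLM } from "@llamaindex/core/llms";

declare const llm: ToolCallLLM;  // any concrete implementation
declare const tools: BaseTool[]; // tool definitions built elsewhere

const question = "What is 12 * 34? Use a calculator tool if needed.";

// Non-streaming: a single ChatResponse; tool calls requested by the assistant
// arrive on message.options (ToolCallLLMMessageOptions).
const response = await llm.chat({
  messages: [{ role: "user", content: question }],
  tools,
});
console.log(response.message.content);
console.log(response.message.options); // inspect for tool-call details, if any

// Streaming: an AsyncIterable of ChatResponseChunk values; `delta` is the
// incremental text, and chunk.options may carry partial tool-call data.
const chunks = await llm.chat({
  messages: [{ role: "user", content: question }],
  stream: true,
  tools,
});
for await (const chunk of chunks) {
  process.stdout.write(chunk.delta);
}
```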


exec()

Call Signature

exec(params): Promise<ExecStreamResponse<AdditionalMessageOptions>>

Defined in: packages/core/src/llms/base.ts:77

Parameters
params

LLMChatParamsStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<ExecStreamResponse<AdditionalMessageOptions>>

Inherited from

BaseLLM.exec

Call Signature

exec(params): Promise<ExecResponse<AdditionalMessageOptions>>

Defined in: packages/core/src/llms/base.ts:83

Parameters
params

LLMChatParamsNonStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<ExecResponse<AdditionalMessageOptions>>

Inherited from

BaseLLM.exec
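exec() accepts the same parameter types as chat() and wraps the outcome in an ExecResponse. The sketch below only shows the call; the fields of ExecResponse are defined in base.ts and are intentionally not assumed here:

```ts
import type { BaseTool, ToolCallLLM } from "@llamaindex/core/llms";

declare const llm: ToolCallLLM;  // any concrete implementation
declare const tools: BaseTool[]; // tool definitions built elsewhere

// exec() takes chat() parameters and packages the outcome as an ExecResponse;
// see that type in base.ts for the exact fields (not assumed here).
const result = await llm.exec({
  messages: [{ role: "user", content: "Check the weather, then answer." }],
  tools,
});
console.log(result); // inspect the produced messages / tool calls
```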


streamExec()

streamExec(params): Promise<ExecStreamResponse<AdditionalMessageOptions>>

Defined in: packages/core/src/llms/base.ts:127

Parameters
params

LLMChatParamsStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<ExecStreamResponse<AdditionalMessageOptions>>

Inherited from

BaseLLM.streamExec
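streamExec() is the streaming counterpart of exec(): it always takes LLMChatParamsStreaming (note the required stream: true) and resolves to an ExecStreamResponse. As with exec(), the sketch avoids assuming that type's fields:

```ts
import type { BaseTool, ToolCallLLM } from "@llamaindex/core/llms";

declare const llm: ToolCallLLM;  // any concrete implementation
declare const tools: BaseTool[]; // tool definitions built elsewhere

// streamExec() requires `stream: true` and resolves to an ExecStreamResponse;
// its fields are defined in base.ts and intentionally not assumed here.
const streaming = await llm.streamExec({
  messages: [{ role: "user", content: "Summarize the weather forecast." }],
  stream: true,
  tools,
});
console.log(streaming);
```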