Skip to main content

Interface: LLM<AdditionalChatOptions, AdditionalMessageOptions>

Unified language model interface

Extends

Type parameters

AdditionalChatOptions extends object = object

AdditionalMessageOptions extends object = object

Properties

metadata

metadata: LLMMetadata

Source

packages/llamaindex/src/llm/types.ts:57

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<object>>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<object>>>

Overrides

LLMChat.chat

Source

packages/llamaindex/src/llm/types.ts:61

chat(params)

chat(params): Promise<ChatResponse<AdditionalMessageOptions>>

Parameters

params: LLMChatParamsNonStreaming<AdditionalChatOptions, AdditionalMessageOptions>

Returns

Promise<ChatResponse<AdditionalMessageOptions>>

Overrides

LLMChat.chat

Source

packages/llamaindex/src/llm/types.ts:67


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Source

packages/llamaindex/src/llm/types.ts:77

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Source

packages/llamaindex/src/llm/types.ts:80