Skip to main content

Class: ReActAgent

The runner manages task execution and provides a high-level API for the user.

Extends

AgentRunner

Constructors

new ReActAgent()

new ReActAgent(params): ReActAgent

Parameters

params: ReACTAgentParamsWithTools | ReACTAgentParamsWithToolRetriever

Returns

ReActAgent

Overrides

AgentRunner.constructor

Source

packages/llamaindex/src/agent/react.ts:345

Properties

#chatHistory

private #chatHistory: ChatMessage<object>[]

Inherited from

AgentRunner.#chatHistory

Source

packages/llamaindex/src/agent/base.ts:207


#llm

private readonly #llm: LLM<object, object>

Inherited from

AgentRunner.#llm

Source

packages/llamaindex/src/agent/base.ts:202


#runner

private readonly #runner: AgentWorker<LLM<object, object>, ReACTAgentStore, object>

Inherited from

AgentRunner.#runner

Source

packages/llamaindex/src/agent/base.ts:208


#systemPrompt

private readonly #systemPrompt: null | MessageContent = null

Inherited from

AgentRunner.#systemPrompt

Source

packages/llamaindex/src/agent/base.ts:206


#tools

private readonly #tools: BaseToolWithCall[] | (query) => Promise<BaseToolWithCall[]>

Inherited from

AgentRunner.#tools

Source

packages/llamaindex/src/agent/base.ts:203


#verbose

private readonly #verbose: boolean

Inherited from

AgentRunner.#verbose

Source

packages/llamaindex/src/agent/base.ts:209


defaultTaskHandler

static defaultTaskHandler: TaskHandler<LLM<object, object>>

Inherited from

AgentRunner.defaultTaskHandler

Source

packages/llamaindex/src/agent/base.ts:218


taskHandler

static taskHandler: TaskHandler<LLM<object, object>, ReACTAgentStore>

Source

packages/llamaindex/src/agent/react.ts:367

Accessors

chatHistory

get chatHistory(): ChatMessage<AdditionalMessageOptions>[]

Returns

ChatMessage<AdditionalMessageOptions>[]

Source

packages/llamaindex/src/agent/base.ts:263


llm

get llm(): AI

Returns

AI

Source

packages/llamaindex/src/agent/base.ts:259


verbose

get verbose(): boolean

Returns

boolean

Source

packages/llamaindex/src/agent/base.ts:267

Methods

chat()

chat(params)

chat(params): Promise<EngineResponse>

Sends the message, along with the class's current chat history, to the LLM.

Parameters

params: ChatEngineParamsNonStreaming

Returns

Promise<EngineResponse>

Inherited from

AgentRunner.chat

Source

packages/llamaindex/src/agent/base.ts:334

chat(params)

chat(params): Promise<ReadableStream<EngineResponse>>

Parameters

params: ChatEngineParamsStreaming

Returns

Promise<ReadableStream<EngineResponse>>

Inherited from

AgentRunner.chat

Source

packages/llamaindex/src/agent/base.ts:335


createStore()

createStore(): object

Returns

object

reasons

reasons: never[] = []

Overrides

AgentRunner.createStore

Source

packages/llamaindex/src/agent/react.ts:361


createTask()

createTask(message, stream, verbose, chatHistory?): ReadableStream<TaskStepOutput<LLM<object, object>, ReACTAgentStore, object>>

Parameters

message: MessageContent

stream: boolean = false

verbose: undefined | boolean = undefined

chatHistory?: ChatMessage<object>[]

Returns

ReadableStream<TaskStepOutput<LLM<object, object>, ReACTAgentStore, object>>

Inherited from

AgentRunner.createTask

Source

packages/llamaindex/src/agent/base.ts:294


getTools()

getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Parameters

query: MessageContent

Returns

BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Inherited from

AgentRunner.getTools

Source

packages/llamaindex/src/agent/base.ts:275


reset()

reset(): void

Resets the chat history so that it is empty.

Returns

void

Inherited from

AgentRunner.reset

Source

packages/llamaindex/src/agent/base.ts:271


defaultCreateStore()

static defaultCreateStore(): object

Returns

object

Inherited from

AgentRunner.defaultCreateStore

Source

packages/llamaindex/src/agent/base.ts:214


shouldContinue()

static shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean

Type parameters

AI extends LLM<object, object>

Store extends object = object

AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never

Parameters

task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>

Returns

boolean

Inherited from

AgentRunner.shouldContinue

Source

packages/llamaindex/src/agent/base.ts:281