Class: OpenAIAgent
Extends
AgentRunner<OpenAI>
Constructors
new OpenAIAgent()
new OpenAIAgent(params): OpenAIAgent
Parameters
• params: OpenAIAgentParams
Returns
Overrides
AgentRunner<OpenAI>.constructor
Source
packages/core/src/agent/openai.ts:36
Properties
#chatHistory
private
#chatHistory: ChatMessage<ToolCallLLMMessageOptions>[]
Inherited from
AgentRunner.#chatHistory
Source
packages/core/src/agent/base.ts:230
#llm
private
readonly
#llm: OpenAI
Inherited from
AgentRunner.#llm
Source
packages/core/src/agent/base.ts:225
#runner
private
readonly
#runner: AgentWorker<OpenAI, object, ToolCallLLMMessageOptions>
Inherited from
AgentRunner.#runner
Source
packages/core/src/agent/base.ts:231
#systemPrompt
private
readonly
#systemPrompt: null | MessageContent = null
Inherited from
AgentRunner.#systemPrompt
Source
packages/core/src/agent/base.ts:229
#tools
private
readonly
#tools: BaseToolWithCall[] | ((query) => Promise<BaseToolWithCall[]>)
Inherited from
AgentRunner.#tools
Source
packages/core/src/agent/base.ts:226
#verbose
private
readonly
#verbose: boolean
Inherited from
AgentRunner.#verbose
Source
packages/core/src/agent/base.ts:232
createStore()
createStore: () => object = AgentRunner.defaultCreateStore
Returns
object
Overrides
AgentRunner.createStore
Source
packages/core/src/agent/openai.ts:53
taskHandler
static
taskHandler: TaskHandler<OpenAI>
Source
packages/core/src/agent/openai.ts:55
Accessors
chatHistory
get
chatHistory(): ChatMessage<AdditionalMessageOptions>[]
Returns
ChatMessage<AdditionalMessageOptions>[]
Source
packages/core/src/agent/base.ts:259
llm
get
llm(): AI
Returns
AI
Source
packages/core/src/agent/base.ts:255
verbose
get
verbose(): boolean
Returns
boolean
Source
packages/core/src/agent/base.ts:263
Methods
chat()
chat(params)
chat(params): Promise<AgentChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: ChatEngineParamsNonStreaming
Returns
Promise<AgentChatResponse<ToolCallLLMMessageOptions>>
Inherited from
AgentRunner.chat
Source
packages/core/src/agent/base.ts:329
chat(params)
chat(params): Promise<ReadableStream<AgentStreamChatResponse<ToolCallLLMMessageOptions>>>
Parameters
• params: ChatEngineParamsStreaming
Returns
Promise<ReadableStream<AgentStreamChatResponse<ToolCallLLMMessageOptions>>>
Inherited from
AgentRunner.chat
Source
packages/core/src/agent/base.ts:332
createTask()
createTask(message, stream, verbose): ReadableStream<TaskStepOutput<OpenAI, object, ToolCallLLMMessageOptions>>
Parameters
• message: MessageContent
• stream: boolean = false
• verbose: undefined | boolean = undefined
Returns
ReadableStream<TaskStepOutput<OpenAI, object, ToolCallLLMMessageOptions>>
Inherited from
AgentRunner.createTask
Source
packages/core/src/agent/base.ts:290
getTools()
getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>
Parameters
• query: MessageContent
Returns
BaseToolWithCall[] | Promise<BaseToolWithCall[]>
Inherited from
AgentRunner.getTools
Source
packages/core/src/agent/base.ts:271
reset()
reset(): void
Returns
void
Inherited from
AgentRunner.reset
Source
packages/core/src/agent/base.ts:267
defaultCreateStore()
static
defaultCreateStore(): object
Returns
object
Inherited from
AgentRunner.defaultCreateStore
Source
packages/core/src/agent/base.ts:237
shouldContinue()
static
shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean
Type parameters
• AI extends LLM<object, object>
• Store extends object = object
• AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never
Parameters
• task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>
Returns
boolean
Inherited from
AgentRunner.shouldContinue