Class: Anthropic
Extends
ToolCallLLM<AnthropicAdditionalChatOptions>
Constructors
new Anthropic()
new Anthropic(init?): Anthropic
Parameters
• init?: Partial<Anthropic>
Returns
Anthropic
Overrides
ToolCallLLM<AnthropicAdditionalChatOptions>.constructor
Source
packages/core/src/llm/anthropic.ts:115
Properties
apiKey?
optional apiKey: string = undefined
Source
packages/core/src/llm/anthropic.ts:110
maxRetries
maxRetries: number
Source
packages/core/src/llm/anthropic.ts:111
maxTokens?
optional maxTokens: number
Source
packages/core/src/llm/anthropic.ts:107
model
model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"
Source
packages/core/src/llm/anthropic.ts:104
session
session: AnthropicSession
Source
packages/core/src/llm/anthropic.ts:113
temperature
temperature: number
Source
packages/core/src/llm/anthropic.ts:105
timeout?
optional timeout: number
Source
packages/core/src/llm/anthropic.ts:112
topP
topP: number
Source
packages/core/src/llm/anthropic.ts:106
Accessors
metadata
get metadata(): object
Returns
object
contextWindow
contextWindow: number
maxTokens
maxTokens: undefined | number
model
model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"
temperature
temperature: number
tokenizer
tokenizer: undefined = undefined
topP
topP: number
Source
packages/core/src/llm/anthropic.ts:138
supportToolCall
get supportToolCall(): boolean
Returns
boolean
Source
packages/core/src/llm/anthropic.ts:134
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Parameters
• params: LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Overrides
ToolCallLLM.chat
Source
packages/core/src/llm/anthropic.ts:259
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
Source
packages/core/src/llm/anthropic.ts:265
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
ToolCallLLM.complete
Source
packages/core/src/llm/base.ts:22
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
ToolCallLLM.complete
Source
packages/core/src/llm/base.ts:25
formatMessages()
formatMessages<Beta>(messages): Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]
Type parameters
• Beta = false
Parameters
• messages: ChatMessage<ToolCallLLMMessageOptions>[]
Returns
Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]
Source
packages/core/src/llm/anthropic.ts:156
getModelName()
getModelName(model): string
Parameters
• model: string
Returns
string
Source
packages/core/src/llm/anthropic.ts:149
streamChat()
protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Parameters
• messages: ChatMessage<ToolCallLLMMessageOptions>[]
• systemPrompt?: null | string
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Source
packages/core/src/llm/anthropic.ts:375
toTool()
static toTool(tool): Tool
Parameters
• tool: BaseTool<any>
Returns
Tool