Class: Gemini
ToolCallLLM for Gemini
Extends
ToolCallLLM
<GeminiAdditionalChatOptions
>
Constructors
new Gemini()
new Gemini(init?): Gemini
Parameters
• init?: GeminiConfig
Returns
Gemini
Overrides
ToolCallLLM<GeminiAdditionalChatOptions>.constructor
Source
packages/core/src/llm/gemini.ts:233
Properties
maxTokens?
optional
maxTokens:number
Source
packages/core/src/llm/gemini.ts:230
model
model:
GEMINI_MODEL
Source
packages/core/src/llm/gemini.ts:227
session
session:
GeminiSession
Source
packages/core/src/llm/gemini.ts:231
temperature
temperature:
number
Source
packages/core/src/llm/gemini.ts:228
topP
topP:
number
Source
packages/core/src/llm/gemini.ts:229
Accessors
metadata
get
metadata():LLMMetadata
Returns
LLMMetadata
Source
packages/core/src/llm/gemini.ts:246
supportToolCall
get
supportToolCall():boolean
Returns
boolean
Source
packages/core/src/llm/gemini.ts:242
Methods
chat()
chat(params)
chat(
params
):Promise
<GeminiChatStreamResponse
>
Parameters
• params: GeminiChatParamsStreaming
Returns
Promise
<GeminiChatStreamResponse
>
Overrides
ToolCallLLM.chat
Source
packages/core/src/llm/gemini.ts:311
chat(params)
chat(
params
):Promise
<GeminiChatNonStreamResponse
>
Parameters
• params: GeminiChatParamsNonStreaming
Returns
Promise
<GeminiChatNonStreamResponse
>
Overrides
ToolCallLLM.chat
Source
packages/core/src/llm/gemini.ts:312
complete()
complete(params)
complete(
params
):Promise
<AsyncIterable
<CompletionResponse
>>
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise
<AsyncIterable
<CompletionResponse
>>
Overrides
ToolCallLLM.complete
Source
packages/core/src/llm/gemini.ts:323
complete(params)
complete(
params
):Promise
<CompletionResponse
>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise
<CompletionResponse
>
Overrides
ToolCallLLM.complete
Source
packages/core/src/llm/gemini.ts:326
nonStreamChat()
protected
nonStreamChat(params
):Promise
<GeminiChatNonStreamResponse
>
Parameters
• params: GeminiChatParamsNonStreaming
Returns
Promise
<GeminiChatNonStreamResponse
>
Source
packages/core/src/llm/gemini.ts:282
prepareChat()
private
prepareChat(params
):object
Parameters
• params: GeminiChatParamsStreaming
| GeminiChatParamsNonStreaming
Returns
object
chat
chat:
ChatSession
messageContent
messageContent:
Part
[]
Source
packages/core/src/llm/gemini.ts:257
streamChat()
protected
streamChat(params
):GeminiChatStreamResponse
Parameters
• params: GeminiChatParamsStreaming
Returns
GeminiChatStreamResponse