Class: Gemini
ToolCallLLM for Gemini
Extends
ToolCallLLM<GeminiAdditionalChatOptions>
Constructors
new Gemini()
new Gemini(init?): Gemini
Parameters
• init?: GeminiConfig
Returns
Gemini
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:203
Properties
maxTokens?
optional maxTokens: number
Defined in
packages/llamaindex/src/llm/gemini/base.ts:200
model
model: GEMINI_MODEL
Defined in
packages/llamaindex/src/llm/gemini/base.ts:197
session
session: IGeminiSession
Defined in
packages/llamaindex/src/llm/gemini/base.ts:201
temperature
temperature: number
Defined in
packages/llamaindex/src/llm/gemini/base.ts:198
topP
topP: number
Defined in
packages/llamaindex/src/llm/gemini/base.ts:199
Accessors
metadata
Get Signature
get metadata(): LLMMetadata
Returns
LLMMetadata
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:216
supportToolCall
Get Signature
get supportToolCall(): boolean
Returns
boolean
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:212
Methods
chat()
chat(params)
chat(params): Promise<GeminiChatStreamResponse>
Get a chat response from the LLM
Parameters
• params: GeminiChatParamsStreaming
Returns
Promise<GeminiChatStreamResponse>
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:297
chat(params)
chat(params): Promise<GeminiChatNonStreamResponse>
Parameters
• params: GeminiChatParamsNonStreaming
Returns
Promise<GeminiChatNonStreamResponse>
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:298
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:309
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Overrides
Defined in
packages/llamaindex/src/llm/gemini/base.ts:312
nonStreamChat()
protected
nonStreamChat(params): Promise<GeminiChatNonStreamResponse>
Parameters
• params: GeminiChatParamsNonStreaming
Returns
Promise<GeminiChatNonStreamResponse>
Defined in
packages/llamaindex/src/llm/gemini/base.ts:227
streamChat()
protected
streamChat(params): GeminiChatStreamResponse
Parameters
• params: GeminiChatParamsStreaming
Returns
GeminiChatStreamResponse