Class: Anthropic
Hierarchy
- ToolCallLLM<AnthropicAdditionalChatOptions>
  ↳ Anthropic
Constructors
constructor
• new Anthropic(init?): Anthropic
Parameters
Name | Type |
---|---|
init? | Partial<Anthropic> |
Returns
Anthropic
Overrides
ToolCallLLM<AnthropicAdditionalChatOptions>.constructor
Defined in
packages/core/src/llm/anthropic.ts:115
Properties
apiKey
• Optional apiKey: string = undefined
Defined in
packages/core/src/llm/anthropic.ts:110
maxRetries
• maxRetries: number
Defined in
packages/core/src/llm/anthropic.ts:111
maxTokens
• Optional maxTokens: number
Defined in
packages/core/src/llm/anthropic.ts:107
model
• model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"
Defined in
packages/core/src/llm/anthropic.ts:104
session
• session: AnthropicSession
Defined in
packages/core/src/llm/anthropic.ts:113
temperature
• temperature: number
Defined in
packages/core/src/llm/anthropic.ts:105
timeout
• Optional timeout: number
Defined in
packages/core/src/llm/anthropic.ts:112
topP
• topP: number
Defined in
packages/core/src/llm/anthropic.ts:106
Accessors
metadata
• get metadata(): Object
Returns
Object
Name | Type |
---|---|
contextWindow | number |
maxTokens | undefined | number |
model | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2" |
temperature | number |
tokenizer | undefined |
topP | number |
Overrides
ToolCallLLM.metadata
Defined in
packages/core/src/llm/anthropic.ts:138
supportToolCall
• get supportToolCall(): boolean
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
Defined in
packages/core/src/llm/anthropic.ts:134
Methods
chat
▸ chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Parameters
Name | Type |
---|---|
params | LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions> |
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Overrides
ToolCallLLM.chat
Defined in
packages/core/src/llm/anthropic.ts:213
▸ chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
Name | Type |
---|---|
params | LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions> |
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
Defined in
packages/core/src/llm/anthropic.ts:219
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsStreaming |
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
ToolCallLLM.complete
Defined in
packages/core/src/llm/base.ts:22
▸ complete(params): Promise<CompletionResponse>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsNonStreaming |
Returns
Promise<CompletionResponse>
Inherited from
ToolCallLLM.complete
Defined in
packages/core/src/llm/base.ts:25
formatMessages
▸ formatMessages<Beta>(messages): Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]
Type parameters
Name | Type |
---|---|
Beta | false |
Parameters
Name | Type |
---|---|
messages | ChatMessage<ToolCallLLMMessageOptions>[] |
Returns
Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]
Defined in
packages/core/src/llm/anthropic.ts:156
getModelName
▸ getModelName(model): string
Parameters
Name | Type |
---|---|
model | string |
Returns
string
Defined in
packages/core/src/llm/anthropic.ts:149
streamChat
▸ streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Parameters
Name | Type |
---|---|
messages | ChatMessage<ToolCallLLMMessageOptions>[] |
systemPrompt? | null | string |
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in
packages/core/src/llm/anthropic.ts:329
toTool
▸ toTool(tool): Tool
Parameters
Name | Type |
---|---|
tool | BaseTool<any> |
Returns
Tool