Class: FireworksLLM
Hierarchy
- OpenAI
↳ FireworksLLM
Constructors
constructor
• new FireworksLLM(init?): FireworksLLM
Parameters
| Name | Type |
| --- | --- |
| init? | Partial<OpenAI> |
Returns
FireworksLLM
Overrides
OpenAI.constructor
Defined in
packages/core/src/llm/fireworks.ts:5
Properties
additionalChatOptions
• Optional additionalChatOptions: OpenAIAdditionalChatOptions
Inherited from
OpenAI.additionalChatOptions
Defined in
packages/core/src/llm/openai.ts:167
additionalSessionOptions
• Optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">
Inherited from
OpenAI.additionalSessionOptions
Defined in
packages/core/src/llm/openai.ts:174
apiKey
• Optional apiKey: string = undefined
Inherited from
OpenAI.apiKey
Defined in
packages/core/src/llm/openai.ts:170
maxRetries
• maxRetries: number
Inherited from
OpenAI.maxRetries
Defined in
packages/core/src/llm/openai.ts:171
maxTokens
• Optional maxTokens: number
Inherited from
OpenAI.maxTokens
Defined in
packages/core/src/llm/openai.ts:166
model
• model: string
Inherited from
OpenAI.model
Defined in
packages/core/src/llm/openai.ts:163
session
• session: OpenAISession
Inherited from
OpenAI.session
Defined in
packages/core/src/llm/openai.ts:173
temperature
• temperature: number
Inherited from
OpenAI.temperature
Defined in
packages/core/src/llm/openai.ts:164
timeout
• Optional timeout: number
Inherited from
OpenAI.timeout
Defined in
packages/core/src/llm/openai.ts:172
topP
• topP: number
Inherited from
OpenAI.topP
Defined in
packages/core/src/llm/openai.ts:165
Accessors
metadata
• get metadata(): LLMMetadata
Returns
LLMMetadata
Inherited from
OpenAI.metadata
Defined in
packages/core/src/llm/openai.ts:236
supportToolCall
• get supportToolCall(): boolean
Returns
boolean
Inherited from
OpenAI.supportToolCall
Defined in
packages/core/src/llm/openai.ts:232
Methods
chat
▸ chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Parameters
| Name | Type |
| --- | --- |
| params | LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions> |
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>
Inherited from
OpenAI.chat
Defined in
packages/core/src/llm/openai.ts:313
▸ chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
| Name | Type |
| --- | --- |
| params | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions> |
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Inherited from
OpenAI.chat
Defined in
packages/core/src/llm/openai.ts:319
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
| Name | Type |
| --- | --- |
| params | LLMCompletionParamsStreaming |
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
Defined in
packages/core/src/llm/base.ts:22
▸ complete(params): Promise<CompletionResponse>
Parameters
| Name | Type |
| --- | --- |
| params | LLMCompletionParamsNonStreaming |
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/src/llm/base.ts:25
streamChat
▸ streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Parameters
| Name | Type |
| --- | --- |
| baseRequestParams | ChatCompletionCreateParams |
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Inherited from
OpenAI.streamChat
Defined in
packages/core/src/llm/openai.ts:394
toOpenAIMessage
▸ toOpenAIMessage(messages): ChatCompletionMessageParam[]
Parameters
| Name | Type |
| --- | --- |
| messages | ChatMessage<ToolCallLLMMessageOptions>[] |
Returns
ChatCompletionMessageParam[]
Inherited from
OpenAI.toOpenAIMessage
Defined in
packages/core/src/llm/openai.ts:264
toOpenAIRole
▸ toOpenAIRole(messageType): ChatCompletionRole
Parameters
| Name | Type |
| --- | --- |
| messageType | MessageType |
Returns
ChatCompletionRole
Inherited from
OpenAI.toOpenAIRole
Defined in
packages/core/src/llm/openai.ts:251
toTool
▸ toTool(tool): ChatCompletionTool
Parameters
| Name | Type |
| --- | --- |
| tool | BaseTool<any> |
Returns
ChatCompletionTool