Skip to main content

Class: Anthropic

Hierarchy

  • ToolCallLLM<AnthropicAdditionalChatOptions>

    Anthropic

Constructors

constructor

new Anthropic(init?): Anthropic

Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial<Anthropic>` |

Returns

Anthropic

Overrides

ToolCallLLM<AnthropicAdditionalChatOptions>.constructor

Defined in

packages/core/src/llm/anthropic.ts:114

Properties

apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/anthropic.ts:109


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/anthropic.ts:110


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/anthropic.ts:106


model

model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/core/src/llm/anthropic.ts:103


session

session: AnthropicSession

Defined in

packages/core/src/llm/anthropic.ts:112


temperature

temperature: number

Defined in

packages/core/src/llm/anthropic.ts:104


timeout

Optional timeout: number

Defined in

packages/core/src/llm/anthropic.ts:111


topP

topP: number

Defined in

packages/core/src/llm/anthropic.ts:105

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined \| number` |
| `model` | `"claude-3-opus" \| "claude-3-sonnet" \| "claude-3-haiku" \| "claude-2.1" \| "claude-instant-1.2"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Overrides

ToolCallLLM.metadata

Defined in

packages/core/src/llm/anthropic.ts:137


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/core/src/llm/anthropic.ts:133

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>` |

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Overrides

ToolCallLLM.chat

Defined in

packages/core/src/llm/anthropic.ts:212

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>` |

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/core/src/llm/anthropic.ts:218


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/src/llm/base.ts:22

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/src/llm/base.ts:25


formatMessages

formatMessages<Beta>(messages): Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]

Type parameters

| Name | Type |
| :--- | :--- |
| `Beta` | `false` |

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage<ToolCallLLMMessageOptions>[]` |

Returns

Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]

Defined in

packages/core/src/llm/anthropic.ts:155


getModelName

getModelName(model): string

Parameters

| Name | Type |
| :--- | :--- |
| `model` | `string` |

Returns

string

Defined in

packages/core/src/llm/anthropic.ts:148


streamChat

streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage<ToolCallLLMMessageOptions>[]` |
| `systemPrompt?` | `null \| string` |

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in

packages/core/src/llm/anthropic.ts:323


toTool

toTool(tool): Tool

Parameters

| Name | Type |
| :--- | :--- |
| `tool` | `BaseTool<any>` |

Returns

Tool

Defined in

packages/core/src/llm/anthropic.ts:354