Skip to main content

Class: ReplicateLLM

Replicate LLM implementation, used to access Llama-2 and Llama-3 chat models hosted on Replicate.

Hierarchy

  • BaseLLM

    ReplicateLLM

Constructors

constructor

new ReplicateLLM(init?): ReplicateLLM

Parameters

Name: Type
init?: Partial<ReplicateLLM> & { noWarn?: boolean }

Returns

ReplicateLLM

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/replicate_ai.ts:115

Properties

chatStrategy

chatStrategy: ReplicateChatStrategy

Defined in

packages/core/src/llm/replicate_ai.ts:109


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/replicate_ai.ts:112


model

model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"

Defined in

packages/core/src/llm/replicate_ai.ts:108


replicateSession

replicateSession: ReplicateSession

Defined in

packages/core/src/llm/replicate_ai.ts:113


temperature

temperature: number

Defined in

packages/core/src/llm/replicate_ai.ts:110


topP

topP: number

Defined in

packages/core/src/llm/replicate_ai.ts:111

Accessors

metadata

get metadata(): Object

Returns

Object

Name: Type
contextWindow: number
maxTokens: undefined | number
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
temperature: number
tokenizer: undefined
topP: number

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/replicate_ai.ts:140

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

Name: Type
params: LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/replicate_ai.ts:307

chat(params): Promise<ChatResponse<object>>

Parameters

Name: Type
params: LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/replicate_ai.ts:310


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name: Type
params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:22

complete(params): Promise<CompletionResponse>

Parameters

Name: Type
params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:25


mapMessageTypeA16Z

mapMessageTypeA16Z(messageType): string

Parameters

Name: Type
messageType: MessageType

Returns

string

Defined in

packages/core/src/llm/replicate_ai.ts:217


mapMessagesToPrompt

mapMessagesToPrompt(messages): Object

Parameters

Name: Type
messages: ChatMessage[]

Returns

Object

Name: Type
prompt: string
systemPrompt: undefined | MessageContent

Defined in

packages/core/src/llm/replicate_ai.ts:151


mapMessagesToPromptA16Z

mapMessagesToPromptA16Z(messages): Object

Parameters

Name: Type
messages: ChatMessage[]

Returns

Object

Name: Type
prompt: string
systemPrompt: undefined

Defined in

packages/core/src/llm/replicate_ai.ts:203


mapMessagesToPromptLlama3

mapMessagesToPromptLlama3(messages): Object

Parameters

Name: Type
messages: ChatMessage[]

Returns

Object

Name: Type
prompt: string
systemPrompt: undefined

Defined in

packages/core/src/llm/replicate_ai.ts:177


mapMessagesToPromptMeta

mapMessagesToPromptMeta(messages, opts?): Object

Parameters

Name: Type
messages: ChatMessage[]
opts?: Object
opts.replicate4Bit?: boolean
opts.withBos?: boolean
opts.withNewlines?: boolean

Returns

Object

Name: Type
prompt: string
systemPrompt: undefined | MessageContent

Defined in

packages/core/src/llm/replicate_ai.ts:230