
Class: Ollama

This class implements both the LLM and Embedding interfaces.

Hierarchy

BaseEmbedding
  ↳ Ollama

Implements

LLM

Constructors

constructor

new Ollama(params): Ollama

Parameters

params: OllamaParams

Returns

Ollama

Overrides

BaseEmbedding.constructor

Defined in

packages/core/src/llm/ollama.ts:57
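
For example, a minimal instantiation might look like the sketch below. It assumes Ollama is exported from the llamaindex package and that OllamaParams accepts at least a model name; any other values are illustrative. The instance created here is reused in the method examples that follow.

```ts
import { Ollama } from "llamaindex";

// Minimal sketch: the model name is assumed to match a model already
// available on the local Ollama server.
const ollama = new Ollama({ model: "llama2" });
```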

Properties

embedBatchSize

embedBatchSize: number = DEFAULT_EMBED_BATCH_SIZE

Inherited from

BaseEmbedding.embedBatchSize

Defined in

packages/core/src/embeddings/types.ts:11


hasStreaming

Readonly hasStreaming: true

Defined in

packages/core/src/llm/ollama.ts:45


model

model: string

Defined in

packages/core/src/llm/ollama.ts:48


options

options: Partial<Omit<Options, "temperature" | "top_p" | "num_ctx">> & Pick<Options, "temperature" | "top_p" | "num_ctx">

Defined in

packages/core/src/llm/ollama.ts:50

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Implementation of

LLM.metadata

Defined in

packages/core/src/llm/ollama.ts:68
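
Continuing with the instance from the constructor example, the getter can be used to inspect the configured model. The LLMMetadata fields shown are assumptions and may differ between versions.

```ts
// metadata is assumed to expose at least the model name and context window.
const { model, contextWindow } = ollama.metadata;
console.log(model, contextWindow);
```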

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:80

chat(params): Promise<ChatResponse<object>>

Parameters

params: LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:83
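
A sketch of both overloads, using the ollama instance from the constructor example. The message and chunk field names (message.content, delta) follow the ChatResponse and ChatResponseChunk types but should be checked against your version.

```ts
// Non-streaming: resolves to a single ChatResponse.
const response = await ollama.chat({
  messages: [{ role: "user", content: "Why is the sky blue?" }],
});
console.log(response.message.content);

// Streaming: stream: true selects the AsyncIterable<ChatResponseChunk> overload.
const stream = await ollama.chat({
  messages: [{ role: "user", content: "Why is the sky blue?" }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```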


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:121

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:124
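
A sketch of both completion overloads, again using the instance from above; CompletionResponse is assumed to expose a text field.

```ts
// Non-streaming completion.
const completion = await ollama.complete({ prompt: "Why is the sky blue?" });
console.log(completion.text);

// Streaming completion: each chunk is a CompletionResponse whose text
// holds the incremental delta.
const completionStream = await ollama.complete({
  prompt: "Why is the sky blue?",
  stream: true,
});
for await (const chunk of completionStream) {
  process.stdout.write(chunk.text);
}
```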


getEmbedding

getEmbedding(prompt): Promise<number[]>

Parameters

prompt: string

Returns

Promise<number[]>

Defined in

packages/core/src/llm/ollama.ts:157


getQueryEmbedding

getQueryEmbedding(query): Promise<number[]>

Parameters

query: string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getQueryEmbedding

Defined in

packages/core/src/llm/ollama.ts:175


getTextEmbedding

getTextEmbedding(text): Promise<number[]>

Parameters

text: string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getTextEmbedding

Defined in

packages/core/src/llm/ollama.ts:171
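
getTextEmbedding and getQueryEmbedding each return the embedding for a single string. A sketch using the instance from the constructor example:

```ts
// Embed a document chunk and a query with the same local model.
const textVector = await ollama.getTextEmbedding("LlamaIndex is a data framework.");
const queryVector = await ollama.getQueryEmbedding("What is LlamaIndex?");
console.log(textVector.length, queryVector.length);
```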


getTextEmbeddings

getTextEmbeddings(texts): Promise<number[][]>

Optionally override this method to retrieve multiple embeddings in a single request

Parameters

texts: string[]

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddings

Defined in

packages/core/src/embeddings/types.ts:28
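
If the backing service supports it, a subclass can override this method to fetch several embeddings at once. The sketch below merely parallelizes the single-text calls and is purely illustrative.

```ts
class ParallelOllama extends Ollama {
  // Hypothetical override: fetch all embeddings concurrently instead of
  // relying on the default one-at-a-time implementation.
  async getTextEmbeddings(texts: string[]): Promise<number[][]> {
    return Promise.all(texts.map((text) => this.getTextEmbedding(text)));
  }
}
```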


getTextEmbeddingsBatch

getTextEmbeddingsBatch(texts, options?): Promise<number[][]>

Get embeddings for a batch of texts

Parameters

texts: string[]
options?: Object
options.logProgress?: boolean

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddingsBatch

Defined in

packages/core/src/embeddings/types.ts:44
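
A sketch of the batched helper, using the instance from above; logProgress is optional.

```ts
const vectors = await ollama.getTextEmbeddingsBatch(
  ["first chunk", "second chunk", "third chunk"],
  { logProgress: true },
);
console.log(vectors.length); // one embedding per input text
```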


similarity

similarity(embedding1, embedding2, mode?): number

Parameters

embedding1: number[]
embedding2: number[]
mode: SimilarityType = SimilarityType.DEFAULT

Returns

number

Inherited from

BaseEmbedding.similarity

Defined in

packages/core/src/embeddings/types.ts:13
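
A sketch comparing the two embeddings produced in the examples above; it assumes SimilarityType is exported from the same package.

```ts
import { SimilarityType } from "llamaindex";

// Falls back to SimilarityType.DEFAULT when the mode argument is omitted.
const score = ollama.similarity(queryVector, textVector, SimilarityType.DEFAULT);
console.log(score);
```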


transform

transform(nodes, _options?): Promise<BaseNode<Metadata>[]>

Parameters

nodes: BaseNode<Metadata>[]
_options?: any

Returns

Promise<BaseNode<Metadata>[]>

Inherited from

BaseEmbedding.transform

Defined in

packages/core/src/embeddings/types.ts:58
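
transform attaches embeddings to nodes, which is how the embedding model is used inside an ingestion pipeline. The TextNode construction below is an assumption about how nodes are built.

```ts
import { TextNode } from "llamaindex";

// Hypothetical nodes; transform returns the same nodes with embeddings set.
const nodes = [new TextNode({ text: "hello world" })];
const embedded = await ollama.transform(nodes);
console.log(embedded.length);
```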


copy

copy(source, destination): Promise<StatusResponse>

Parameters

source: string
destination: string

Returns

Promise<StatusResponse>

Defined in

packages/core/src/llm/ollama.ts:223
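
A sketch using the instance from above; the model names are illustrative.

```ts
// Duplicate a local model under a new name.
await ollama.copy("llama2", "llama2-backup");
```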


create

create(modelName, options?): Promise<ProgressResponse>

Parameters

modelName: string
options?: Omit<CreateRequest, "model"> & { stream: false }

Returns

Promise<ProgressResponse>

Defined in

packages/core/src/llm/ollama.ts:192

create(modelName, options): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

modelName: string
options: Omit<CreateRequest, "model"> & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Defined in

packages/core/src/llm/ollama.ts:198
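
A sketch of the non-streaming overload; the modelfile field is an assumption about the CreateRequest shape in the underlying Ollama client.

```ts
// Create a new local model from a Modelfile definition (field name assumed).
const created = await ollama.create("my-assistant", {
  modelfile: "FROM llama2\nSYSTEM You are a helpful assistant.",
  stream: false,
});
```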


delete

delete(modelName): Promise<StatusResponse>

Parameters

modelName: string

Returns

Promise<StatusResponse>

Defined in

packages/core/src/llm/ollama.ts:217
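
A sketch removing a local model by name (name illustrative).

```ts
await ollama.delete("my-assistant");
```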


detail

detail(modelName, options?): Promise<ShowResponse>

Parameters

modelName: string
options?: Omit<ShowRequest, "model">

Returns

Promise<ShowResponse>

Defined in

packages/core/src/llm/ollama.ts:185
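
A sketch; ShowResponse typically carries the model's parameters and Modelfile, but the exact fields depend on the Ollama client version.

```ts
const info = await ollama.detail("llama2");
console.log(info);
```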


list

list(): Promise<ModelResponse[]>

Returns

Promise<ModelResponse[]>

Defined in

packages/core/src/llm/ollama.ts:180
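
A sketch listing the models available on the local Ollama server; the name field on ModelResponse is an assumption.

```ts
const models = await ollama.list();
for (const m of models) {
  console.log(m.name);
}
```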


pull

pull(modelName, options?): Promise<ProgressResponse>

Parameters

modelName: string
options?: Omit<CreateRequest, "model"> & { stream: false }

Returns

Promise<ProgressResponse>

Defined in

packages/core/src/llm/ollama.ts:230

pull(modelName, options): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

modelName: string
options: Omit<CreateRequest, "model"> & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Defined in

packages/core/src/llm/ollama.ts:236
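
A sketch of both overloads; ProgressResponse is assumed to carry a status string.

```ts
// Non-streaming: resolves once the download completes.
await ollama.pull("llama2", { stream: false });

// Streaming: iterate ProgressResponse updates as the download proceeds.
const progress = await ollama.pull("mistral", { stream: true });
for await (const update of progress) {
  console.log(update.status);
}
```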


push

push(modelName, options?): Promise<ProgressResponse>

Parameters

modelName: string
options?: Omit<CreateRequest, "model"> & { stream: false }

Returns

Promise<ProgressResponse>

Defined in

packages/core/src/llm/ollama.ts:255

push(modelName, options): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

modelName: string
options: Omit<CreateRequest, "model"> & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Defined in

packages/core/src/llm/ollama.ts:261
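
A sketch of the streaming overload; pushing assumes the model is tagged under a registry namespace you can write to, and the name below is illustrative.

```ts
const updates = await ollama.push("myname/my-assistant:latest", { stream: true });
for await (const update of updates) {
  console.log(update.status);
}
```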