Skip to main content

Class: Ollama

This class both implements the LLM and Embedding interfaces.

Hierarchy

Implements

  • LLM
  • Omit<OllamaBase, "chat">

Constructors

constructor

new Ollama(params): Ollama

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `OllamaParams` |

Returns

Ollama

Overrides

BaseEmbedding.constructor

Defined in

packages/core/src/llm/ollama.ts:75

Properties

embedBatchSize

embedBatchSize: number = DEFAULT_EMBED_BATCH_SIZE

Inherited from

BaseEmbedding.embedBatchSize

Defined in

packages/core/src/embeddings/types.ts:11


hasStreaming

Readonly hasStreaming: true

Defined in

packages/core/src/llm/ollama.ts:61


model

model: string

Defined in

packages/core/src/llm/ollama.ts:66


ollama

ollama: Ollama

Defined in

packages/core/src/llm/ollama.ts:63


options

options: Partial<Omit<Options, "temperature" | "top_p" | "num_ctx">> & Pick<Options, "temperature" | "top_p" | "num_ctx">

Defined in

packages/core/src/llm/ollama.ts:68

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Implementation of

LLM.metadata

Defined in

packages/core/src/llm/ollama.ts:87

Methods

abort

abort(): void

Returns

void

Implementation of

Omit.abort

Defined in

packages/core/src/llm/ollama.ts:209


chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming<object, object>` |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:99

chat(params): Promise<ChatResponse<object>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming<object, object>` |

Returns

Promise<ChatResponse<object>>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:102


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:140

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:143


copy

copy(request): Promise<StatusResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `CopyRequest` |

Returns

Promise<StatusResponse>

Implementation of

Omit.copy

Defined in

packages/core/src/llm/ollama.ts:245


create

create(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `CreateRequest & { stream: true }` |

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.create

Defined in

packages/core/src/llm/ollama.ts:224

create(request): Promise<ProgressResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `CreateRequest & { stream?: false }` |

Returns

Promise<ProgressResponse>

Implementation of

Omit.create

Defined in

packages/core/src/llm/ollama.ts:227


delete

delete(request): Promise<StatusResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `DeleteRequest` |

Returns

Promise<StatusResponse>

Implementation of

Omit.delete

Defined in

packages/core/src/llm/ollama.ts:242


embeddings

embeddings(request): Promise<EmbeddingsResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `EmbeddingsRequest` |

Returns

Promise<EmbeddingsResponse>

Implementation of

Omit.embeddings

Defined in

packages/core/src/llm/ollama.ts:254


encodeImage

encodeImage(image): Promise<string>

Parameters

| Name | Type |
| :--- | :--- |
| `image` | `string \| Uint8Array` |

Returns

Promise<string>

Implementation of

Omit.encodeImage

Defined in

packages/core/src/llm/ollama.ts:212


generate

generate(request): Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `GenerateRequest & { stream: true }` |

Returns

Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Implementation of

Omit.generate

Defined in

packages/core/src/llm/ollama.ts:215

generate(request): Promise<GenerateResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `GenerateRequest & { stream?: false }` |

Returns

Promise<GenerateResponse>

Implementation of

Omit.generate

Defined in

packages/core/src/llm/ollama.ts:218


getEmbedding

getEmbedding(prompt): Promise<number[]>

Parameters

| Name | Type |
| :--- | :--- |
| `prompt` | `string` |

Returns

Promise<number[]>

Defined in

packages/core/src/llm/ollama.ts:176


getQueryEmbedding

getQueryEmbedding(query): Promise<number[]>

Parameters

| Name | Type |
| :--- | :--- |
| `query` | `string` |

Returns

Promise<number[]>

Overrides

BaseEmbedding.getQueryEmbedding

Defined in

packages/core/src/llm/ollama.ts:194


getTextEmbedding

getTextEmbedding(text): Promise<number[]>

Parameters

| Name | Type |
| :--- | :--- |
| `text` | `string` |

Returns

Promise<number[]>

Overrides

BaseEmbedding.getTextEmbedding

Defined in

packages/core/src/llm/ollama.ts:190


getTextEmbeddings

getTextEmbeddings(texts): Promise<number[][]>

Optionally override this method to retrieve multiple embeddings in a single request

Parameters

| Name | Type |
| :--- | :--- |
| `texts` | `string[]` |

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddings

Defined in

packages/core/src/embeddings/types.ts:28


getTextEmbeddingsBatch

getTextEmbeddingsBatch(texts, options?): Promise<number[][]>

Get embeddings for a batch of texts

Parameters

| Name | Type |
| :--- | :--- |
| `texts` | `string[]` |
| `options?` | `Object` |
| `options.logProgress?` | `boolean` |

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddingsBatch

Defined in

packages/core/src/embeddings/types.ts:44


list

list(): Promise<ListResponse>

Returns

Promise<ListResponse>

Implementation of

Omit.list

Defined in

packages/core/src/llm/ollama.ts:248


pull

pull(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `PullRequest & { stream: true }` |

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.pull

Defined in

packages/core/src/llm/ollama.ts:233

pull(request): Promise<ProgressResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `PullRequest & { stream?: false }` |

Returns

Promise<ProgressResponse>

Implementation of

Omit.pull

Defined in

packages/core/src/llm/ollama.ts:236


push

push(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `PushRequest & { stream: true }` |

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.push

Defined in

packages/core/src/llm/ollama.ts:200

push(request): Promise<ProgressResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `PushRequest & { stream?: false }` |

Returns

Promise<ProgressResponse>

Implementation of

Omit.push

Defined in

packages/core/src/llm/ollama.ts:203


show

show(request): Promise<ShowResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `request` | `ShowRequest` |

Returns

Promise<ShowResponse>

Implementation of

Omit.show

Defined in

packages/core/src/llm/ollama.ts:251


similarity

similarity(embedding1, embedding2, mode?): number

Parameters

| Name | Type | Default value |
| :--- | :--- | :--- |
| `embedding1` | `number[]` | `undefined` |
| `embedding2` | `number[]` | `undefined` |
| `mode` | `SimilarityType` | `SimilarityType.DEFAULT` |

Returns

number

Inherited from

BaseEmbedding.similarity

Defined in

packages/core/src/embeddings/types.ts:13


transform

transform(nodes, _options?): Promise<BaseNode<Metadata>[]>

Parameters

| Name | Type |
| :--- | :--- |
| `nodes` | `BaseNode<Metadata>[]` |
| `_options?` | `any` |

Returns

Promise<BaseNode<Metadata>[]>

Inherited from

BaseEmbedding.transform

Defined in

packages/core/src/embeddings/types.ts:58