Skip to main content

Class: Ollama

This class implements both the LLM and Embedding interfaces.

Extends

Extended by

Implements

  • LLM
  • Omit<OllamaBase, "chat">

Constructors

new Ollama()

new Ollama(params): Ollama

Parameters

params: OllamaParams

Returns

Ollama

Overrides

BaseEmbedding.constructor

Defined in

packages/llamaindex/src/llm/ollama.ts:75

Properties

embedBatchSize

embedBatchSize: number

Inherited from

BaseEmbedding.embedBatchSize

Defined in

packages/core/dist/embeddings/index.d.ts:34


embedInfo?

optional embedInfo: EmbeddingInfo

Inherited from

BaseEmbedding.embedInfo

Defined in

packages/core/dist/embeddings/index.d.ts:35


getTextEmbeddings()

getTextEmbeddings: (texts) => Promise<number[][]>

Optionally override this method to retrieve multiple embeddings in a single request.

Parameters

texts: string[]

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddings

Defined in

packages/core/dist/embeddings/index.d.ts:43


hasStreaming

readonly hasStreaming: true = true

Defined in

packages/llamaindex/src/llm/ollama.ts:61


model

model: string

Defined in

packages/llamaindex/src/llm/ollama.ts:66


ollama

ollama: Ollama

Defined in

packages/llamaindex/src/llm/ollama.ts:63


options

options: Partial<Omit<Options, "temperature" | "top_p" | "num_ctx">> & Pick<Options, "temperature" | "top_p" | "num_ctx">

Defined in

packages/llamaindex/src/llm/ollama.ts:68

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Implementation of

LLM.metadata

Defined in

packages/llamaindex/src/llm/ollama.ts:87

Methods

abort()

abort(): void

Returns

void

Implementation of

Omit.abort

Defined in

packages/llamaindex/src/llm/ollama.ts:205


chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Implementation of

LLM.chat

Defined in

packages/llamaindex/src/llm/ollama.ts:99

chat(params)

chat(params): Promise<ChatResponse<object>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Implementation of

LLM.chat

Defined in

packages/llamaindex/src/llm/ollama.ts:102


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Implementation of

LLM.complete

Defined in

packages/llamaindex/src/llm/ollama.ts:140

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Implementation of

LLM.complete

Defined in

packages/llamaindex/src/llm/ollama.ts:143


copy()

copy(request): Promise<StatusResponse>

Parameters

request: CopyRequest

Returns

Promise<StatusResponse>

Implementation of

Omit.copy

Defined in

packages/llamaindex/src/llm/ollama.ts:241


create()

create(request)

create(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: CreateRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.create

Defined in

packages/llamaindex/src/llm/ollama.ts:220

create(request)

create(request): Promise<ProgressResponse>

Parameters

request: CreateRequest & object

Returns

Promise<ProgressResponse>

Implementation of

Omit.create

Defined in

packages/llamaindex/src/llm/ollama.ts:223


delete()

delete(request): Promise<StatusResponse>

Parameters

request: DeleteRequest

Returns

Promise<StatusResponse>

Implementation of

Omit.delete

Defined in

packages/llamaindex/src/llm/ollama.ts:238


embeddings()

embeddings(request): Promise<EmbeddingsResponse>

Parameters

request: EmbeddingsRequest

Returns

Promise<EmbeddingsResponse>

Implementation of

Omit.embeddings

Defined in

packages/llamaindex/src/llm/ollama.ts:250


encodeImage()

encodeImage(image): Promise<string>

Parameters

image: string | Uint8Array

Returns

Promise<string>

Implementation of

Omit.encodeImage

Defined in

packages/llamaindex/src/llm/ollama.ts:208


generate()

generate(request)

generate(request): Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Parameters

request: GenerateRequest & object

Returns

Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Implementation of

Omit.generate

Defined in

packages/llamaindex/src/llm/ollama.ts:211

generate(request)

generate(request): Promise<GenerateResponse>

Parameters

request: GenerateRequest & object

Returns

Promise<GenerateResponse>

Implementation of

Omit.generate

Defined in

packages/llamaindex/src/llm/ollama.ts:214


getQueryEmbedding()

getQueryEmbedding(query): Promise<null | number[]>

Parameters

query: MessageContentDetail

Returns

Promise<null | number[]>

Inherited from

BaseEmbedding.getQueryEmbedding

Defined in

packages/core/dist/embeddings/index.d.ts:38


getTextEmbedding()

getTextEmbedding(text): Promise<number[]>

Parameters

text: string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getTextEmbedding

Defined in

packages/llamaindex/src/llm/ollama.ts:190


getTextEmbeddingsBatch()

getTextEmbeddingsBatch(texts, options?): Promise<number[][]>

Get embeddings for a batch of texts

Parameters

texts: string[]

options?: BaseEmbeddingOptions

Returns

Promise<number[][]>

Inherited from

BaseEmbedding.getTextEmbeddingsBatch

Defined in

packages/core/dist/embeddings/index.d.ts:49


list()

list(): Promise<ListResponse>

Returns

Promise<ListResponse>

Implementation of

Omit.list

Defined in

packages/llamaindex/src/llm/ollama.ts:244


pull()

pull(request)

pull(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: PullRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.pull

Defined in

packages/llamaindex/src/llm/ollama.ts:229

pull(request)

pull(request): Promise<ProgressResponse>

Parameters

request: PullRequest & object

Returns

Promise<ProgressResponse>

Implementation of

Omit.pull

Defined in

packages/llamaindex/src/llm/ollama.ts:232


push()

push(request)

push(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: PushRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Implementation of

Omit.push

Defined in

packages/llamaindex/src/llm/ollama.ts:196

push(request)

push(request): Promise<ProgressResponse>

Parameters

request: PushRequest & object

Returns

Promise<ProgressResponse>

Implementation of

Omit.push

Defined in

packages/llamaindex/src/llm/ollama.ts:199


show()

show(request): Promise<ShowResponse>

Parameters

request: ShowRequest

Returns

Promise<ShowResponse>

Implementation of

Omit.show

Defined in

packages/llamaindex/src/llm/ollama.ts:247


similarity()

similarity(embedding1, embedding2, mode?): number

Parameters

embedding1: number[]

embedding2: number[]

mode?: SimilarityType

Returns

number

Inherited from

BaseEmbedding.similarity

Defined in

packages/core/dist/embeddings/index.d.ts:36


transform()

transform(nodes, options?): Promise<BaseNode<Metadata>[]>

Parameters

nodes: BaseNode<Metadata>[]

options?: BaseEmbeddingOptions

Returns

Promise<BaseNode<Metadata>[]>

Inherited from

BaseEmbedding.transform

Defined in

packages/core/dist/embeddings/index.d.ts:50


truncateMaxTokens()

truncateMaxTokens(input): string[]

Parameters

input: string[]

Returns

string[]

Inherited from

BaseEmbedding.truncateMaxTokens

Defined in

packages/core/dist/embeddings/index.d.ts:51