Properties

baseUrl: string = "http://localhost:11434"
keepAlive: string = "5m"
model: string = "llama2"

Optional

embeddingOnly?: boolean
f16KV?: boolean
format?: string & Record<never, never> | "json"
frequencyPenalty?: number
headers?: Record<string, string>
logitsAll?: boolean
lowVram?: boolean
mainGpu?: number
mirostat?: number
mirostatEta?: number
mirostatTau?: number
numBatch?: number
numCtx?: number
numGpu?: number
numGqa?: number
numKeep?: number
numPredict?: number
numThread?: number
penalizeNewline?: boolean
presencePenalty?: number
repeatLastN?: number
repeatPenalty?: number
ropeFrequencyBase?: number
ropeFrequencyScale?: number
stop?: string[]
temperature?: number
tfsZ?: number
topK?: number
topP?: number
typicalP?: number
useMLock?: boolean
useMMap?: boolean
vocabOnly?: boolean
Class that represents the Ollama language model. It extends the base LLM class and implements the OllamaInput interface.
Example
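A minimal sketch of constructing and calling the model with a few of the properties listed above. The import path ("@langchain/community/llms/ollama") and the Runnable-style invoke() call are assumptions and may differ between package versions.

// Assumed import path; adjust to your installed LangChain version.
import { Ollama } from "@langchain/community/llms/ollama";

// Configure the model using some of the documented properties.
const model = new Ollama({
  baseUrl: "http://localhost:11434", // default Ollama server address
  model: "llama2",                   // default model name
  temperature: 0.7,
  topP: 0.9,
  numCtx: 4096,
  stop: ["\n\n"],
});

// Request a completion from the local Ollama server.
const answer = await model.invoke("Why is the sky blue?");
console.log(answer);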