Ollama
Use local Ollama models
yaml
type: io.kestra.plugin.ai.provider.OllamaExamples
yaml
id: chat_completion
namespace: company.ai

# Flow input: the user prompt forwarded to the model below.
inputs:
  - id: prompt
    type: STRING

tasks:
  - id: chat_completion
    type: io.kestra.plugin.ai.completion.ChatCompletion
    # Local Ollama server acting as the LLM provider.
    provider:
      type: io.kestra.plugin.ai.provider.Ollama
      modelName: llama3
      endpoint: http://localhost:11434
      # Expose the model's reasoning ("thinking") tokens in the output.
      thinkingEnabled: true
      returnThinking: true
    messages:
      - type: SYSTEM
        content: >-
          You are a helpful assistant, answer concisely, avoid overly casual
          language or unnecessary verbosity.
      - type: USER
        content: "{{ inputs.prompt }}"