feat(config): enhance Ollama configuration with dynamic path support

This commit is contained in:
2025-10-05 20:13:42 +02:00
parent 4e4389a03f
commit e628816ea8
3 changed files with 11 additions and 6 deletions

View File

@@ -2,23 +2,26 @@
"audio_file": "/tmp/pyvtt_recording.wav",
"output_file": "/tmp/pyvtt_transcript.txt",
"whisper_path": "/path/to/whisper-cli",
"language": "en",
"socket_path": "/tmp/pyvtt.sock",
"ollama_url": "http://localhost",
"ollama_path": "/api/chat",
"ollama_port": 12345,
"presets": [
{
"name": "Default",
"language": "en",
"whisper_model": "/path/to/default-whisper-model.bin",
"ollama_model": "default-model",
"ollama_prompt": "Provide a detailed response to the following text:\n\n",
"ollama": "disable"
},
{
"name": "Quick English",
"whisper_model": "/path/to/quick-whisper-model.bin",
"ollama_model": "quick-model",
"ollama_prompt": "Quickly correct the following English text for grammar and punctuation:\n\n"
"ollama_model": "gemma3:4b",
"ollama_context": 131072,
"ollama_prompt": [
"Quickly correct the following English text for grammar and punctuation:\n",
"\n"
]
},
{
"name": "German Correction",

View File

@@ -14,6 +14,7 @@ class OllamaClient:
"""
self.base_url = config.ollama_url
self.port = config.ollama_port
self.path = config.ollama_path
def send_chat(
self,
@@ -52,7 +53,7 @@ class OllamaClient:
"stream": False
}
endpoint = f"{self.base_url}:{self.port}/api/chat"
endpoint = f"{self.base_url}:{self.port}{self.path}"
# Anfrage an Ollama senden und Antwort extrahieren
try:

View File

@@ -20,6 +20,7 @@ class AppConfig(BaseModel):
whisper_path: str
socket_path: str
ollama_url: str
ollama_path: str
ollama_port: int
journal_path: str
presets: List[PresetConfig]