From de004a3cdf3f30b6bfb352f8cdbba0435aab2273 Mon Sep 17 00:00:00 2001
From: Evan Simkowitz
Date: Fri, 18 Oct 2024 12:33:08 -0700
Subject: [PATCH] Remove ollama AI preset (#1068)

I added this as an example, but it fails if the user doesn't have ollama
and/or llama3.1, so I'm removing it.
---
 pkg/wconfig/defaultconfig/presets.json | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/pkg/wconfig/defaultconfig/presets.json b/pkg/wconfig/defaultconfig/presets.json
index 78da237fe..38d16d494 100644
--- a/pkg/wconfig/defaultconfig/presets.json
+++ b/pkg/wconfig/defaultconfig/presets.json
@@ -107,12 +107,5 @@
         "ai:model": "gpt-4o-mini",
         "ai:maxtokens": 2048,
         "ai:timeoutms": 60000
-    },
-    "ai@ollama-llama3.1": {
-        "display:name": "ollama - llama3.1",
-        "display:order": 0,
-        "ai:*": true,
-        "ai:baseurl": "http://localhost:11434/v1",
-        "ai:model": "llama3.1:latest"
     }
 }
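
Note for anyone who does run ollama locally: the preset can be recreated in your
own configuration rather than shipping as a default. As a minimal sketch
(assuming Wave still reads user-defined AI presets from a presets.json in your
Wave config directory, and that ollama is serving its OpenAI-compatible API on
its default port), the removed entry can be added back as-is:

{
    "ai@ollama-llama3.1": {
        "display:name": "ollama - llama3.1",
        "display:order": 0,
        "ai:*": true,
        "ai:baseurl": "http://localhost:11434/v1",
        "ai:model": "llama3.1:latest"
    }
}

Here "ai:baseurl" points at ollama's OpenAI-compatible endpoint on its default
port (11434), and "ai:model" should name a model you have actually pulled (for
example with: ollama pull llama3.1); otherwise the preset fails in exactly the
way described in the commit message.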