Diffstat (limited to 'src')
| File mode | Path | Lines changed |
|---|---|---|
| -rw-r--r-- | src/ollama.tsx | 14 |
1 file changed, 4 insertions, 10 deletions
diff --git a/src/ollama.tsx b/src/ollama.tsx
index a59b03b..38cb0f6 100644
--- a/src/ollama.tsx
+++ b/src/ollama.tsx
@@ -73,7 +73,6 @@ type OllamaGenerateParameters = {
 }
 
 async function ollamaGenerate(prompt: string, parameters?: OllamaGenerateParameters) {
-
   if (!logseq.settings) {
     throw new Error("Couldn't find ollama-logseq settings")
   }
@@ -85,8 +84,6 @@ async function ollamaGenerate(prompt: string, parameters?: OllamaGenerateParamet
   params.prompt = prompt
   params.stream = false
 
-  console.log(params)
-
   try {
     const response = await fetch(`http://${logseq.settings.host}/api/generate`, {
       method: 'POST',
@@ -96,16 +93,14 @@ async function ollamaGenerate(prompt: string, parameters?: OllamaGenerateParamet
       body: JSON.stringify(params)
     })
     if (!response.ok) {
-      console.log("Error in Ollama request: " + response.statusText)
-      logseq.UI.showMsg("Error in Ollama request")
+      logseq.UI.showMsg("Coudln't fulfull request make sure that ollama service is running and make sure there is no typo in host or model name")
       throw new Error("Error in Ollama request: " + response.statusText)
     }
     const data = await response.json()
-
     return data
   } catch (e: any) {
-    console.log(e)
-    logseq.UI.showMsg("Error in Ollama request")
+    console.error("ERROR: ", e)
+    logseq.App.showMsg("Coudln't fulfull request make sure that ollama service is running and make sure there is no typo in host or model name")
   }
 }
@@ -126,9 +121,8 @@ async function promptLLM(prompt: string) {
       }),
     })
     if (!response.ok) {
-      console.log("Error: couldn't fulfill request")
       logseq.App.showMsg("Coudln't fulfull request make sure that ollama service is running and make sure there is no typo in host or model name")
-      throw new Error('Network response was not ok');
+      throw new Error("Error in Ollama request: " + response.statusText)
     }
     const data = await response.json();
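For context, here is a minimal sketch of how `ollamaGenerate`'s error handling reads after this patch: the debug `console.log` calls are gone, failures are logged with `console.error`, and a user-facing hint is shown through Logseq's message API. Anything outside the hunks above is an assumption here: the `@logseq/libs` import, the `Content-Type` header, and the simplified parameter type. The user-facing string is also reworded slightly; the patch's literal text keeps the typos ("Coudln't fulfull request ...").

```tsx
// Sketch only: pieces not shown in the diff hunks are assumed from context.
import '@logseq/libs' // assumed; provides the global `logseq` plugin API

// Simplified stand-in for the OllamaGenerateParameters type defined near the
// top of src/ollama.tsx; the real type lists the Ollama generation options.
type OllamaGenerateParameters = Record<string, any>

async function ollamaGenerate(prompt: string, parameters?: OllamaGenerateParameters) {
  if (!logseq.settings) {
    throw new Error("Couldn't find ollama-logseq settings")
  }

  // Assumed: params starts from the caller-supplied generation parameters.
  const params: OllamaGenerateParameters = { ...parameters }
  params.prompt = prompt
  params.stream = false

  try {
    const response = await fetch(`http://${logseq.settings.host}/api/generate`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' }, // assumed; not shown in the hunks
      body: JSON.stringify(params),
    })
    if (!response.ok) {
      // After the patch: tell the user what to check instead of console.log,
      // and keep the HTTP status text on the thrown Error.
      logseq.UI.showMsg("Couldn't fulfill request: make sure the ollama service is running and there is no typo in the host or model name")
      throw new Error('Error in Ollama request: ' + response.statusText)
    }
    const data = await response.json()
    return data
  } catch (e: any) {
    // After the patch: failures go to console.error plus a user-facing message.
    console.error('ERROR: ', e)
    logseq.App.showMsg("Couldn't fulfill request: make sure the ollama service is running and there is no typo in the host or model name")
  }
}
```

The last hunk applies the same idea to `promptLLM`: the debug `console.log` is dropped and the generic `'Network response was not ok'` error is replaced by one carrying `response.statusText`, matching the error thrown in `ollamaGenerate`.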
