diff options
| author | David Li <taweili@gmail.com> | 2023-12-02 19:48:20 +0800 |
|---|---|---|
| committer | David Li <taweili@gmail.com> | 2023-12-02 19:48:20 +0800 |
| commit | 4ec628bf58a13e3f8d4ce575916b4c4c2bc262f8 (patch) | |
| tree | 1afd6c5140685c3fdf88a2db6dbf0290cc66096b /src | |
| parent | 62888e9e8f3a343b6826b809cbd3d1e5edc112b1 (diff) | |
| download | ollama-logseq-4ec628bf58a13e3f8d4ce575916b4c4c2bc262f8.tar.xz ollama-logseq-4ec628bf58a13e3f8d4ce575916b4c4c2bc262f8.zip | |
When prompting from a block, use the block's properties to pass
parameters that configure Ollama, e.g. specify the model
and tune the model parameters per the Ollama generate API.
Diffstat (limited to 'src')
| -rw-r--r-- | src/ollama.tsx | 64 |
1 file changed, 62 insertions, 2 deletions
diff --git a/src/ollama.tsx b/src/ollama.tsx index b99513c..f792613 100644 --- a/src/ollama.tsx +++ b/src/ollama.tsx @@ -67,6 +67,50 @@ export async function getPageContentFromBlock(b: BlockEntity): Promise<string> { return blockContents.join(" "); } +type OllamaGenerateParameters = { + model?: string; + [key: string]: any; +} + +async function ollamaGenerate(prompt: string, parameters?: OllamaGenerateParameters) { + + if (!logseq.settings) { + throw new Error("Couldn't find ollama-logseq settings") + } + + let params = parameters || {}; + if (params.model === undefined) { + params.model = logseq.settings.model; + } + params.prompt = prompt + params.stream = false + + console.log(params) + + try { + const response = await fetch(`http://${logseq.settings.host}/api/generate`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(params) + }) + if (!response.ok) { + console.log("Error in Ollama request: " + response.statusText) + logseq.UI.showMsg("Error in Ollama request") + throw new Error("Error in Ollama request: " + response.statusText) + } + const data = await response.json() + + console.log(data) + + return data.response + } catch (e: any) { + console.log(e) + logseq.UI.showMsg("Error in Ollama request") + } +} + async function promptLLM(prompt: string) { if (!logseq.settings) { throw new Error("Couldn't find logseq settings"); @@ -149,11 +193,27 @@ export async function summarizeBlock() { } } +async function getOllamaParametersFromBlockProperties(b: BlockEntity) { + const properties = await logseq.Editor.getBlockProperties(b.uuid); + const ollamaParameters: OllamaGenerateParameters = {} + const prefix = 'ollamaGenerate' + for (const property in properties) { + if (property.startsWith(prefix)) { + const key = property.replace(prefix, '').toLowerCase() + ollamaParameters[key] = properties[property] + } + } + return ollamaParameters +} + export async function promptFromBlockEvent(b: IHookEvent) { try { const 
currentBlock = await logseq.Editor.getBlock(b.uuid) - const answerBlock = await logseq.Editor.insertBlock(currentBlock!.uuid, '⌛Generating ...', { before: false }) - const response = await promptLLM(`${currentBlock!.content}`); + const answerBlock = await logseq.Editor.insertBlock(currentBlock!.uuid, '🦙Generating ...', { before: false }) + const params = await getOllamaParametersFromBlockProperties(currentBlock!) + const prompt = currentBlock!.content.replace(/^.*::.*$/gm, '') // nasty hack to remove properties from block content + const response = await ollamaGenerate(prompt, params); + await logseq.Editor.updateBlock(answerBlock!.uuid, `${response}`) } catch (e: any) { logseq.UI.showMsg(e.toString(), 'warning') |
