aboutsummaryrefslogtreecommitdiff
path: root/src/ollama.tsx
blob: a08c6e3143b57767b8b7d2ce3b98c93eadf75c35 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import { SettingSchemaDesc } from "@logseq/libs/dist/LSPlugin.user";

// Resolve after `t` milliseconds (default 100ms); used to let the editor settle
// before reading the current page/block state.
const delay = (t: number = 100): Promise<void> =>
  new Promise<void>((resolve) => {
    setTimeout(resolve, t)
  })


/** Settings schema rendered by Logseq's plugin-settings UI. */
let settings: SettingSchemaDesc[] = [
  {
    key: "host",
    type: "string",
    title: "Host",
    default: "localhost:11434",
    description: "Set the host of your ollama model",
  },
  {
    key: "model",
    type: "string",
    title: "LLM Model",
    default: "mistral:instruct",
    description: "Set your desired model to use ollama",
  },
]

/**
 * Send a single non-streaming generation request to an Ollama server.
 *
 * @param url    Host (and port) of the Ollama server, e.g. "localhost:11434".
 * @param prompt Prompt text sent to the model.
 * @param model  Ollama model name, e.g. "mistral:instruct".
 * @returns The generated response text from the model.
 * @throws Error when the HTTP response is not ok.
 */
async function promptLLM(url: string, prompt: string, model: string): Promise<string> {
  // Bug fix: the `url` parameter was previously ignored and the endpoint was
  // hard-coded to http://localhost:11434, so the plugin's "host" setting
  // could never take effect.
  const response = await fetch(`http://${url}/api/generate`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      model: model,
      prompt: prompt,
      stream: false, // request the whole answer in one JSON payload
    }),
  })
  if (!response.ok) {
    throw new Error('Network response was not ok');
  }

  const data = await response.json();

  // Ollama's /api/generate returns the text under the "response" key.
  return data.response;

}

/**
 * Ask the model to define `word` and insert the answer into the page.
 *
 * @param word The word or phrase to define.
 */
export async function defineWord(word: string) {
  // Bug fix: the askAI promise was not awaited, so callers could not
  // sequence on completion and failures became unhandled rejections.
  await askAI(`Define this following ${word}`)
}

/**
 * Summarize every block on the current page and append the summary as a
 * new block at the bottom of the page.
 *
 * Shows a warning message in Logseq if anything fails.
 */
export async function summarize() {
  // Give the editor a moment to settle before reading the page tree.
  await delay(300)

  try {
    const currentSelectedBlocks = await logseq.Editor.getCurrentPageBlocksTree()
    let blocksContent = ""
    if (currentSelectedBlocks) {
      let lastBlock: any = currentSelectedBlocks[currentSelectedBlocks.length - 1]
      for (const block of currentSelectedBlocks) {
        // Bug fix: was "/n" (a literal slash-n), which glued blocks together
        // instead of separating them with newlines.
        blocksContent += block.content + "\n"
      }
      // Insert a placeholder block after the last block; it is replaced
      // with the summary once the model answers.
      if (lastBlock) {
        lastBlock = await logseq.Editor.insertBlock(lastBlock.uuid, '🚀 Summarizing....', { before: false })
      }

      // Use the user-configured host/model from the settings schema instead
      // of hard-coded values, falling back to the schema defaults.
      const host = (logseq.settings?.["host"] as string) ?? "localhost:11434"
      const model = (logseq.settings?.["model"] as string) ?? "mistral:instruct"
      const summary = await promptLLM(host, `Summarize the following ${blocksContent}`, model)

      await logseq.Editor.updateBlock(lastBlock.uuid, `Summary: ${summary}`)
    }

  } catch (e) {
    logseq.App.showMsg(e.toString(), 'warning')
    console.error(e)
  }
}

/**
 * Send `prompt` to the configured LLM and insert the answer into the
 * current page (a "Generating...." placeholder is shown meanwhile).
 *
 * Shows a warning message in Logseq if anything fails.
 *
 * @param prompt The prompt text forwarded to the model.
 */
export async function askAI(prompt: string) {
  // Give the editor a moment to settle before reading the page tree.
  await delay(300)

  try {
    const currentSelectedBlocks = await logseq.Editor.getCurrentPageBlocksTree()
    if (currentSelectedBlocks) {
      let lastBlock: any = currentSelectedBlocks[currentSelectedBlocks.length - 1]
      if (lastBlock) {
        lastBlock = await logseq.Editor.insertBlock(lastBlock.uuid, 'Generating....', { before: true })
      }
      // Fix: read host/model from the plugin settings (with schema defaults)
      // instead of hard-coding them, so the settings UI actually works.
      const host = (logseq.settings?.["host"] as string) ?? "localhost:11434"
      const model = (logseq.settings?.["model"] as string) ?? "mistral:instruct"
      const response = await promptLLM(host, prompt, model)
      await logseq.Editor.updateBlock(lastBlock.uuid, response)
    }

  } catch (e) {
    logseq.App.showMsg(e.toString(), 'warning')
    console.error(e)
  }
}

/**
 * Ask the LLM to break the current block's task into numbered subtasks and
 * insert each one as a child TODO block.
 *
 * Shows a warning message in Logseq if anything fails.
 */
export async function DivideTaskIntoSubTasks() {
  try {
    const currentBlock = await logseq.Editor.getCurrentBlock()
    if (currentBlock) {
      logseq.App.showMsg(`
          [:div.p-2
            [:h1 "currentBlock content"]
            [:h2.text-xl "Divide this task into subtasks: ${currentBlock?.content}"]]
        `)
      // Read host/model from settings (with schema defaults) instead of
      // hard-coding them.
      const host = (logseq.settings?.["host"] as string) ?? "localhost:11434"
      const model = (logseq.settings?.["model"] as string) ?? "mistral:instruct"
      const response = await promptLLM(host, `Divide this task into subtasks with numbers: ${currentBlock.content}`, model)
      for (const line of response.split("\n")) {
        // Bug fix: the old `todo.slice(3)` blindly dropped 3 characters,
        // mangling lines numbered "10." and up or lines with no numbering.
        // Strip a leading "N." / "N)" prefix only when present.
        const todo = line.replace(/^\s*\d+[.)]\s*/, "").trim()
        if (todo.length === 0) continue // skip blank lines in the LLM output
        await logseq.Editor.insertBlock(currentBlock.uuid, `TODO ${todo}`, { before: false })
      }
    }
  } catch (e) {
    logseq.App.showMsg(e.toString(), 'warning')
    console.error(e)
  }
}