path: root/src
author     omagdy7 <omar.professional8777@gmail.com>  2023-11-03 17:57:35 +0200
committer  omagdy7 <omar.professional8777@gmail.com>  2023-11-03 17:57:35 +0200
commit     dd4299a4de8a31802a4551d631c67836484d9699 (patch)
tree       bae16e70a7000533316b01ae30e8982d20255d51 /src
parent     189d9e8173049aac2cb9f0aea923e339bfc76de7 (diff)
download   ollama-logseq-dd4299a4de8a31802a4551d631c67836484d9699.tar.xz
           ollama-logseq-dd4299a4de8a31802a4551d631c67836484d9699.zip
Moved to React instead of vanilla JS and added the basic skeleton of the plugin
Diffstat (limited to 'src')
-rw-r--r--  src/App.tsx                          49
-rw-r--r--  src/components/CommandPallete.tsx    96
-rw-r--r--  src/components/PromptAI.tsx          45
-rw-r--r--  src/index.css                         4
-rw-r--r--  src/main.tsx                         93
-rw-r--r--  src/ollama.tsx                      116
-rw-r--r--  src/utils.ts                         24
7 files changed, 427 insertions, 0 deletions
diff --git a/src/App.tsx b/src/App.tsx
new file mode 100644
index 0000000..5b5fca0
--- /dev/null
+++ b/src/App.tsx
@@ -0,0 +1,49 @@
+import React, { useEffect, useRef, useState } from "react";
+import CommandPalette from "./components/CommandPallete";
+import { useAppVisible } from "./utils";
+
+const options = [
+ { label: 'Ask Ai' },
+ { label: 'Define' },
+ { label: 'Divide into subtasks' },
+ { label: 'Summarize' },
+];
+
+
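+// Open the plugin's main UI and focus the command-palette input once it has rendered.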
+async function ollamaUI() {
+ console.log("Hello")
+ logseq.showMainUI({ autoFocus: true })
+ setTimeout(() => {
+ document.getElementById("ai-input")?.focus()
+ console.log(document.getElementById("ai-input"))
+ }, 300)
+}
+
+function App() {
+ const innerRef = useRef<HTMLDivElement>(null);
+ const visible = useAppVisible();
+
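+  // Register the "ollama" slash command once when the app mounts.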
+ useEffect(() => {
+ logseq.Editor.registerSlashCommand("ollama", ollamaUI)
+ }, [])
+
+ if (visible) {
+ return (
+ <main
+ className="fixed inset-0 flex items-center justify-center"
+ onClick={(e) => {
+ if (!innerRef.current?.contains(e.target as any)) {
+ window.logseq.hideMainUI();
+ }
+ }}
+ >
+ <div ref={innerRef} className="text-white text-2xl">
+ <CommandPalette options={options} />
+ </div>
+ </main>
+ );
+ }
+ return null;
+}
+
+export default App;
diff --git a/src/components/CommandPallete.tsx b/src/components/CommandPallete.tsx
new file mode 100644
index 0000000..e4f5924
--- /dev/null
+++ b/src/components/CommandPallete.tsx
@@ -0,0 +1,96 @@
+import React, { useState, useEffect, useRef } from 'react';
+import { DivideTaskIntoSubTasks, summarize } from '../ollama';
+import { PromptAI } from './PromptAI';
+
+
+function CommandPalette({ options }) {
+ console.log("rendered commana pallate")
+ const [inputValue, setInputValue] = useState('');
+ const [selectedOption, setSelectedOption] = useState<{ label: string }>({ label: "Ask Ai" });
+ const [filteredOptions, setFilteredOptions] = useState(options);
+ const [isExecute, setIsExecute] = useState(false)
+ const inputRef = useRef(null);
+
+ useEffect(() => {
+ // Initially, select the first option.
+ if (filteredOptions.length > 0) {
+ setSelectedOption(filteredOptions[0]);
+ }
+ }, [filteredOptions]);
+
+
+ const handleInputChange = (e) => {
+ const query = e.target.value;
+ setInputValue(query);
+
+ // Filter options based on the input.
+ const results = options.filter((option: { label: string }) =>
+ option.label.toLowerCase().includes(query.toLowerCase())
+ );
+ setFilteredOptions(results);
+ };
+
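+  // ArrowUp/ArrowDown and Tab/Shift+Tab cycle through the filtered options; Enter runs the selected action.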
+  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
+ if (e.key === 'ArrowUp' || e.key === 'ArrowDown' || e.key === 'Tab') {
+ e.preventDefault();
+
+ const currentIndex = filteredOptions.indexOf(selectedOption);
+ let newIndex = currentIndex;
+
+      if (e.key === 'ArrowUp' || (e.shiftKey && e.key === 'Tab')) {
+ newIndex = (currentIndex - 1 + filteredOptions.length) % filteredOptions.length;
+ } else if (e.key === 'ArrowDown' || e.key === 'Tab') {
+ newIndex = (currentIndex + 1) % filteredOptions.length;
+ }
+
+ setSelectedOption(filteredOptions[newIndex]);
+ } else if (e.key === 'Enter') {
+ if (selectedOption) {
+ setIsExecute(true)
+ setInputValue(selectedOption.label);
+ if (selectedOption.label === "Divide into subtasks") {
+ DivideTaskIntoSubTasks()
+ } else if (selectedOption.label === "Summarize") {
+ summarize()
+ }
+ }
+ }
+ };
+
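+  // "Ask Ai" and "Define" need extra input, so they hand off to PromptAI; the other actions have already run, so nothing is rendered.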
+ return (
+ isExecute && inputValue == "Ask Ai" ? (
+ <PromptAI type="prompt" />
+ ) : isExecute && inputValue === "Define" ? (
+ <PromptAI type="define" />
+ ) : !isExecute ? (
+ <div className='w-screen flex items-center justify-center'>
+ <div className="rounded-2xl bg-gray-800 text-white p-4 dark:bg-slate-900 dark:text-gray-100 w-3/4">
+ <input
+ ref={inputRef}
+ type="text"
+ placeholder="AI action..."
+ value={inputValue}
+ onChange={handleInputChange}
+ onKeyDown={handleKeyDown}
+ id="ai-input"
+ className="bg-gray-700 text-white px-2 py-1 rounded-md dark:bg-gray-800 w-full"
+ />
+ <ul className="mt-2 max-h-90 overflow-y-auto">
+ {filteredOptions.map((option: { label: string }, index: number) => (
+ <li
+ key={index}
+ onClick={() => setSelectedOption(option)}
+ className={`p-2 cursor-pointer ${selectedOption === option ? 'bg-blue-600 text-white border-2 border-blue-500' : ''
+ } hover:bg-gray-600`}
+ >
+ {option.label}
+ </li>
+ ))}
+ </ul>
+ </div>
+ </div>
+ ) : null
+ );
+}
+
+export default CommandPalette;
diff --git a/src/components/PromptAI.tsx b/src/components/PromptAI.tsx
new file mode 100644
index 0000000..7a6b361
--- /dev/null
+++ b/src/components/PromptAI.tsx
@@ -0,0 +1,45 @@
+import React, { useEffect, useRef, useState } from 'react'
+import { askAI, defineWord, DivideTaskIntoSubTasks } from '../ollama';
+
+export const PromptAI = ({ type }) => {
+
+ const placeholder = type === 'prompt' ? "Prompt..." : "Define..."
+ const [inputValue, setInputValue] = useState('');
+ const [hitEnter, setHitEnter] = useState(false)
+
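+  // When Enter is pressed, send the typed text to the matching ollama helper.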
+ useEffect(() => {
+ if (hitEnter) {
+ if (type === 'prompt') {
+ askAI(inputValue)
+ } else {
+ defineWord(inputValue)
+ }
+ }
+ }, [hitEnter])
+
+ const handleInputChange = (e) => {
+ const query = e.target.value;
+ setInputValue(query);
+ };
+
+ const handleKeyDown = (e) => {
+ if (e.key === 'Enter') {
+ setHitEnter(true)
+ }
+ }
+ return (
+ !hitEnter ? (
+ <div className='w-screen text-center'>
+ <input
+ autoFocus
+ type="text"
+ placeholder={placeholder}
+ value={inputValue}
+ onChange={handleInputChange}
+ onKeyDown={handleKeyDown}
+ className="bg-gray-700 text-white px-2 py-1 rounded-md dark:bg-gray-800 inline-block w-3/4"
+ />
+ </div>
+ ) : null
+ )
+}
diff --git a/src/index.css b/src/index.css
new file mode 100644
index 0000000..a90f074
--- /dev/null
+++ b/src/index.css
@@ -0,0 +1,4 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
diff --git a/src/main.tsx b/src/main.tsx
new file mode 100644
index 0000000..c994e33
--- /dev/null
+++ b/src/main.tsx
@@ -0,0 +1,93 @@
+import "@logseq/libs";
+
+import React, { useEffect } from "react";
+import * as ReactDOM from "react-dom/client";
+import App from "./App";
+import "./index.css";
+
+import { logseq as PL } from "../package.json";
+import { SettingSchemaDesc } from "@logseq/libs/dist/LSPlugin";
+
+// @ts-expect-error
+const css = (t, ...args) => String.raw(t, ...args);
+
+const delay = (t = 100) => new Promise(r => setTimeout(r, t))
+
+const pluginId = PL.id;
+
+
+
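+// Settings schema for the Ollama host and model; registration via logseq.useSettingsSchema is still commented out below.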
+let settings: SettingSchemaDesc[] = [
+ {
+ key: "host",
+ type: "string",
+ title: "Host",
+ description: "Set the host of your ollama model",
+ default: "localhost:11434"
+ },
+ {
+ key: "model",
+ type: "string",
+ title: "LLM Model",
+ description: "Set your desired model to use ollama",
+ default: "mistral:instruct"
+ },
+]
+
+function main() {
+ console.log("Hello")
+ console.info(`#${pluginId}: MAIN`);
+ // logseq.useSettingsSchema(settings)
+ let loading = false
+ const root = ReactDOM.createRoot(document.getElementById("app")!);
+
+ root.render(
+ <React.StrictMode>
+ <App />
+ </React.StrictMode>
+ );
+
+ function show() {
+ logseq.showMainUI();
+ }
+ function createModel() {
+ return {
+ show() {
+ logseq.showMainUI();
+ },
+ };
+ }
+
+ logseq.provideModel(createModel());
+ logseq.setMainUIInlineStyle({
+ zIndex: 11,
+ });
+
+ const openIconName = "template-plugin-open";
+
+
+ logseq.provideStyle(css`
+ .${openIconName} {
+ opacity: 1;
+ font-size: 20px;
+ margin-top: 4px;
+ }
+
+ .${openIconName}:hover {
+ color: red;
+ }
+ `);
+
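+  // Add a toolbar button that opens the plugin UI through the "show" method provided above.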
+ logseq.App.registerUIItem("toolbar", {
+ key: openIconName,
+ template: `
+ <a data-on-click="show"
+ class="button">
+ <i class="ti ti-brand-reddit"></i>
+ </a>
+ `,
+ });
+}
+logseq.ready(main).catch(console.error);
+
+
diff --git a/src/ollama.tsx b/src/ollama.tsx
new file mode 100644
index 0000000..a08c6e3
--- /dev/null
+++ b/src/ollama.tsx
@@ -0,0 +1,116 @@
+import { SettingSchemaDesc } from "@logseq/libs/dist/LSPlugin.user";
+
+const delay = (t = 100) => new Promise(r => setTimeout(r, t))
+
+
+let settings: SettingSchemaDesc[] = [
+ {
+ key: "host",
+ type: "string",
+ title: "Host",
+ description: "Set the host of your ollama model",
+ default: "localhost:11434"
+ },
+ {
+ key: "model",
+ type: "string",
+ title: "LLM Model",
+ description: "Set your desired model to use ollama",
+ default: "mistral:instruct"
+ },
+]
+
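+// Send a single non-streaming /api/generate request to the Ollama server and return the generated text.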
+async function promptLLM(url: string, prompt: string, model: string) {
+  const response = await fetch(`http://${url}/api/generate`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ model: model,
+ prompt: prompt,
+ stream: false,
+ }),
+ })
+ if (!response.ok) {
+ throw new Error('Network response was not ok');
+ }
+
+ const data = await response.json();
+
+ return data.response;
+
+}
+
+export async function defineWord(word: string) {
+  askAI(`Define the following: ${word}`)
+}
+
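+// Summarize all blocks on the current page: append a placeholder block, then replace it with the model's summary.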
+export async function summarize() {
+ await delay(300)
+
+ try {
+ const currentSelectedBlocks = await logseq.Editor.getCurrentPageBlocksTree()
+ let blocksContent = ""
+ if (currentSelectedBlocks) {
+ let lastBlock: any = currentSelectedBlocks[currentSelectedBlocks.length - 1]
+ for (const block of currentSelectedBlocks) {
+        blocksContent += block.content + "\n"
+ }
+ if (lastBlock) {
+ lastBlock = await logseq.Editor.insertBlock(lastBlock.uuid, '🚀 Summarizing....', { before: false })
+ }
+
+ const summary = await promptLLM("localhost:11434", `Summarize the following ${blocksContent}`, "mistral:instruct")
+
+ await logseq.Editor.updateBlock(lastBlock.uuid, `Summary: ${summary}`)
+ }
+
+ } catch (e) {
+ logseq.App.showMsg(e.toString(), 'warning')
+ console.error(e)
+ }
+}
+
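+// Send a free-form prompt to the model and replace a "Generating...." placeholder block with the response.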
+export async function askAI(prompt: string) {
+ await delay(300)
+
+ try {
+ const currentSelectedBlocks = await logseq.Editor.getCurrentPageBlocksTree()
+ if (currentSelectedBlocks) {
+ let lastBlock: any = currentSelectedBlocks[currentSelectedBlocks.length - 1]
+ if (lastBlock) {
+ lastBlock = await logseq.Editor.insertBlock(lastBlock.uuid, 'Generating....', { before: true })
+ }
+ const response = await promptLLM("localhost:11434", prompt, "mistral:instruct")
+ await logseq.Editor.updateBlock(lastBlock.uuid, response)
+ }
+
+ } catch (e) {
+ logseq.App.showMsg(e.toString(), 'warning')
+ console.error(e)
+ }
+}
+
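+// Ask the model to split the current block's task into numbered subtasks and insert each one as a new TODO block.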
+export async function DivideTaskIntoSubTasks() {
+ try {
+ const currentBlock = await logseq.Editor.getCurrentBlock()
+ if (currentBlock) {
+ // const block = await logseq.Editor.insertBlock(currentBlock.uuid, 'Generating....', { before: false })
+ logseq.App.showMsg(`
+ [:div.p-2
+ [:h1 "currentBlock content"]
+ [:h2.text-xl "Divide this task into subtasks: ${currentBlock?.content}"]]
+ `)
+ const response = await promptLLM("localhost:11434", `Divide this task into subtasks with numbers: ${currentBlock.content}`, "mistral:instruct")
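+      // The prompt asks for numbered subtasks ("1. ..."), so slice(3) strips the leading numbering from each line before creating a TODO.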
+ for (const todo of response.split("\n")) {
+        await logseq.Editor.insertBlock(currentBlock.uuid, `TODO ${todo.slice(3)}`, { before: false })
+ }
+ }
+ } catch (e) {
+ logseq.App.showMsg(e.toString(), 'warning')
+ console.error(e)
+ }
+}
diff --git a/src/utils.ts b/src/utils.ts
new file mode 100644
index 0000000..171bf71
--- /dev/null
+++ b/src/utils.ts
@@ -0,0 +1,24 @@
+import { LSPluginUserEvents } from "@logseq/libs/dist/LSPlugin.user";
+import React from "react";
+
+let _visible = logseq.isMainUIVisible;
+
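+// Subscribe to a Logseq plugin event and return an unsubscribe callback.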
+function subscribeLogseqEvent<T extends LSPluginUserEvents>(
+ eventName: T,
+ handler: (...args: any) => void
+) {
+ logseq.on(eventName, handler);
+ return () => {
+ logseq.off(eventName, handler);
+ };
+}
+
+const subscribeToUIVisible = (onChange: () => void) =>
+ subscribeLogseqEvent("ui:visible:changed", ({ visible }) => {
+ _visible = visible;
+ onChange();
+ });
+
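+// React hook that tracks the main UI's visibility via useSyncExternalStore.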
+export const useAppVisible = () => {
+ return React.useSyncExternalStore(subscribeToUIVisible, () => _visible);
+};