Skip to content

Commit cf5834b

Browse files
author
Mauve Signweaver
committed
Add new LLM API
1 parent d7c4ddb commit cf5834b

File tree

5 files changed

+162
-1
lines changed

5 files changed

+162
-1
lines changed

README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -66,9 +66,9 @@ A minimal web browser for the distributed web
6666
- Scan a QR code from the browser action window.
6767
- Right click a link or image to generate a QR code for it
6868
- Built-in ad blocker (ublock origin)
69+
- Use local and cloud LLMs via `window.llm.chat({messages})` and `window.llm.complete("prompt")`
6970
- Built-in support for creating web archives via [ArchiveWeb.page](https://github.com/webrecorder/archiveweb.page/)
7071
- Open links in new windows (right click on element)
71-
- Find text on the page (`ctrl+f` to bring into focus, `esc` to hide)
7272
- Autocomplete URLs from history (type in the URL bar, up/down to navigate, right to autocomplete)
7373
- Persist open windows when quitting
7474
- Save files from pages (any protocol, right click it)

app/config.js

+9
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,15 @@ const DEFAULT_BT_DIR = path.join(USER_DATA, 'bt')
1212
const DEFAULT_PAGE = 'agregore://welcome'
1313

1414
export default RC('agregore', {
15+
llm: {
16+
enabled: true,
17+
18+
baseURL: 'http://localhost:11434/v1/',
19+
// Uncomment this to use OpenAI instead
20+
// baseURL: 'https://api.openai.com/v1/'
21+
apiKey: 'ollama',
22+
model: 'phi3:3.8b-mini-4k-instruct-q4_0'
23+
},
1524
accelerators: {
1625
OpenDevTools: 'CommandOrControl+Shift+I',
1726
NewWindow: 'CommandOrControl+N',

app/index.js

+3
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import { WindowManager } from './window.js'
1010
import { createExtensions } from './extensions/index.js'
1111
import * as history from './history.js'
1212
import { version } from './version.js'
13+
import * as llm from './llm.js'
1314

1415
const IS_DEBUG = process.env.NODE_ENV === 'debug'
1516

@@ -142,6 +143,8 @@ async function onready () {
142143

143144
const webSession = session.fromPartition(WEB_PARTITION)
144145

146+
llm.addPreloads(webSession)
147+
145148
const electronSection = /Electron.+ /i
146149
const existingAgent = webSession.getUserAgent()
147150
const newAgent = existingAgent.replace(electronSection, `AgregoreDesktop/${version} `)

app/llm-preload.js

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
const { contextBridge, ipcRenderer } = require('electron')

// Renderer-side surface of the LLM API. Each method forwards over IPC to the
// matching ipcMain.handle() channel registered by the main process in llm.js.
const llmAPI = {
  chat: (args) => ipcRenderer.invoke('llm-chat', args),
  complete: (args) => ipcRenderer.invoke('llm-complete', args),
  isSupported: (prompt, args = {}) => ipcRenderer.invoke('llm-supported', { ...args, prompt })
}

contextBridge.exposeInMainWorld('llm', llmAPI)

app/llm.js

+142
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
import config from './config.js'
2+
import { ipcMain } from 'electron'
3+
import path from 'node:path'
4+
import { fileURLToPath } from 'node:url'
5+
6+
// Derive a CommonJS-style __dirname for this ES module's directory,
// used below to locate the preload script on disk.
const __dirname = fileURLToPath(new URL('./', import.meta.url))

// LLM connection settings from the `llm` section of the RC config
// (defaults in app/config.js point at a local Ollama server).
const { baseURL, apiKey, model, enabled } = config.llm

// Guards init() so the model check/pull happens at most once per process.
let isInitialized = false
12+
// IPC bridge: route the renderer's window.llm.* calls (see llm-preload.js)
// to the implementations below.
ipcMain.handle('llm-supported', (_event) => isSupported())

ipcMain.handle('llm-chat', (_event, args) => chat(args))

ipcMain.handle('llm-complete', (_event, args) => complete(args))
23+
24+
// Report whether the LLM API can be used: requires the config flag to be on,
// and either the configured model to be present or an Ollama backend (which
// can pull the model on demand during init()).
export async function isSupported () {
  if (!enabled) return false
  if (await hasModel()) return true
  // No model yet — Ollama installs can still pull it, so report support.
  return apiKey === 'ollama'
}
30+
31+
// Register llm-preload.js on the given Electron session so every page in it
// gets the window.llm API injected.
export function addPreloads (session) {
  const existing = session.getPreloads()
  session.setPreloads([...existing, path.join(__dirname, 'llm-preload.js')])
}
37+
38+
// One-time setup, invoked lazily by chat()/complete(). Throws if the API is
// disabled in config; for Ollama backends, pulls the configured model if it
// is not already available locally.
export async function init () {
  if (!enabled) throw new Error('LLM API is not enabled')
  if (isInitialized) return

  // TODO: prompt for download
  if (apiKey === 'ollama' && !(await hasModel())) {
    await pullModel()
  }

  isInitialized = true
}
50+
51+
// Fetch the available models from the OpenAI-compatible endpoint:
// GET {baseURL}/models → { data: [...] }
async function listModels () {
  const response = await get('./models', 'Unable to list models')
  return response.data
}
55+
56+
// Ask the server to download the configured model. This is an Ollama-specific
// endpoint; the absolute '/api/pull' path resolves against the server root
// rather than under the /v1/ prefix of baseURL.
async function pullModel () {
  const body = { name: model }
  await post('/api/pull', body, `Unable to pull model ${model}`)
}
61+
62+
// True if the configured model appears in the server's model list. Any
// failure to list (server down, bad auth, etc.) is treated as "not present".
async function hasModel () {
  try {
    const available = await listModels()
    return available.some(({ id }) => id === model)
  } catch {
    return false
  }
}
71+
72+
// Run a chat completion against the configured backend.
// @param {object} opts
// @param {Array} [opts.messages] - OpenAI-style [{role, content}] history.
// @param {number} [opts.temperature]
// @param {number} [opts.maxTokens] - forwarded as max_tokens.
// @param {string|string[]} [opts.stop]
// @returns {Promise<string>} the assistant reply text.
// @throws if the LLM API is disabled or the request fails.
export async function chat ({
  messages = [],
  temperature,
  maxTokens,
  stop
}) {
  await init()
  const { choices } = await post('./chat/completions', {
    messages,
    model,
    temperature,
    max_tokens: maxTokens,
    stop
  }, 'Unable to generate completion')

  // Chat completions return the reply under `message`, not `text` — the bare
  // `text` field only exists on the legacy /completions API, so the previous
  // `choices[0].text` was always undefined here.
  return choices[0].message.content
}
89+
90+
// Run a plain text completion (legacy /completions endpoint) against the
// configured backend. Lazily initializes the backend on first use.
// @param {object} opts - { prompt, temperature, maxTokens, stop }
// @returns {Promise<string>} the generated continuation text.
export async function complete ({
  prompt,
  temperature,
  maxTokens,
  stop
}) {
  await init()

  const payload = {
    prompt,
    model,
    temperature,
    max_tokens: maxTokens,
    stop
  }
  const { choices } = await post('./completions', payload, 'Unable to generate completion')

  const [first] = choices
  return first.text
}
107+
108+
// Authenticated GET against the API, with `path` resolved relative to the
// configured base URL (e.g. './models' → {baseURL}/models). Throws with the
// given message plus the response body on any non-2xx status.
async function get (path, errorMessage) {
  const { href } = new URL(path, baseURL)
  const headers = { Authorization: `Bearer ${apiKey}` }

  const response = await fetch(href, { method: 'GET', headers })
  if (response.ok) return await response.json()

  throw new Error(`${errorMessage} ${await response.text()}`)
}
124+
125+
// Authenticated JSON POST against the API. `path` is resolved relative to the
// configured base URL, so both './chat/completions' and absolute paths like
// '/api/pull' target the same server. Throws with the given message plus the
// response body on any non-2xx status.
async function post (path, data, errorMessage) {
  const url = new URL(path, baseURL).href

  const response = await fetch(url, {
    method: 'POST',
    headers: {
      // 'utf-8' is the registered charset label; the previous 'utf8' is not.
      'Content-Type': 'application/json; charset=utf-8',
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify(data)
  })

  if (!response.ok) {
    throw new Error(`${errorMessage} ${await response.text()}`)
  }

  return await response.json()
}

0 commit comments

Comments
 (0)