Wrap root modal in portal (#997)
* Wrap root modal in portal
* Add venus service
- mobile: fix gap at bottom of chat ui
- openrouter: safely check for anthropic model
sceuick authored Aug 7, 2024
1 parent eef461f commit 40fd42f
Showing 12 changed files with 163 additions and 16 deletions.
2 changes: 2 additions & 0 deletions common/adapters.ts
@@ -95,6 +95,7 @@ export const AI_ADAPTERS = [
'openrouter',
'mancer',
'petals',
'venus',
] as const
export const CHAT_ADAPTERS = ['default', ...AI_ADAPTERS] as const

@@ -299,6 +300,7 @@ export const ADAPTER_LABELS: { [key in AIAdapter]: string } = {
mancer: 'Mancer',
petals: 'Petals',
agnaistic: 'Agnaistic',
venus: 'Venus',
}

export const INSTRUCT_SERVICES: { [key in AIAdapter]?: boolean } = {
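For context, the AIAdapter union is presumably derived from the `as const` array (the mapped-type annotation on ADAPTER_LABELS suggests as much), which is why registering a new service ripples through the label, preset, and handler maps in this commit. A minimal sketch, assuming that derivation:

// Assumed derivation of the union type from the `as const` array:
export type AIAdapter = (typeof AI_ADAPTERS)[number]

// Once 'venus' is in AI_ADAPTERS, any exhaustive map over AIAdapter
// (ADAPTER_LABELS here, or the handlers map in srv/adapter/agnaistic.ts)
// fails to type-check until it gains a matching 'venus' entry.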
9 changes: 6 additions & 3 deletions common/presets.ts
@@ -110,7 +110,7 @@ export function mapPresetsToAdapter(presets: Partial<AppSchema.GenSettings>, ada
const map = serviceGenMap[adapter]
const body: any = {}

for (const [keyStr, value] of Object.entries(map)) {
for (const [keyStr, value] of Object.entries(map || {})) {
const key = keyStr as keyof GenMap
if (!value) continue

@@ -133,7 +133,7 @@ export function getGenSettings(chat: AppSchema.Chat, adapter: AIAdapter) {
const presetValues = getPresetValues(chat)

const body: any = {}
for (const [keyStr, value] of Object.entries(map)) {
for (const [keyStr, value] of Object.entries(map || {})) {
const key = keyStr as keyof GenMap
if (!value) continue

@@ -161,7 +161,7 @@ function getPresetValues(chat: AppSchema.Chat): Partial<AppSchema.GenSettings> {
return defaultPresets.basic
}

export const serviceGenMap: Record<Exclude<ChatAdapter, 'default'>, GenMap> = {
export const serviceGenMap: { [key in ChatAdapter]?: GenMap } = {
kobold: {
maxTokens: 'max_length',
repetitionPenalty: 'rep_pen',
@@ -434,5 +434,8 @@ export function getFallbackPreset(adapter: AIAdapter): Partial<AppSchema.GenSett

case 'mancer':
return deepClone(defaultPresets.mancer)

case 'venus':
return deepClone(defaultPresets.venus)
}
}
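serviceGenMap is now a partial map, so `serviceGenMap[adapter]` can legitimately be undefined for services that ship no GenMap; the `Object.entries(map || {})` guard keeps both loops from throwing in that case. A minimal sketch of the failure mode, with hypothetical values:

// Under the new partial type, the lookup may come back undefined:
const map = serviceGenMap['venus']
// Object.entries(undefined) throws a TypeError at runtime,
// whereas Object.entries(map || {}) yields no entries and the loop is simply skipped.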
12 changes: 12 additions & 0 deletions common/presets/ooba.ts
@@ -31,4 +31,16 @@ export const oobaPresets = {
typicalP: 1,
gaslight: templates.Alpaca,
},
venus: {
name: 'Venus',
service: 'venus',
maxTokens: 300,
maxContextLength: 8000,
presencePenalty: 1.15,
frequencyPenalty: 1.15,
temp: 0.7,
topK: 40,
topP: 1,
gaslight: templates.Alpaca,
},
} satisfies Record<string, Partial<AppSchema.GenSettings>>
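This default preset is what the new `case 'venus'` in getFallbackPreset (presets.ts above) hands back, presumably via the shared defaultPresets aggregate; deepClone means callers can tweak the result without mutating the default. A small usage sketch under that assumption:

// Assumes defaultPresets.venus ultimately points at oobaPresets.venus:
const preset = getFallbackPreset('venus')
// The clone is safe to adjust per request without touching the shared default:
preset.maxTokens = 200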
3 changes: 3 additions & 0 deletions common/prompt.ts
@@ -916,6 +916,9 @@ export function getContextLimit(

case 'mancer':
return Math.min(configuredMax, 8000) - genAmount

case 'venus':
return Math.min(configuredMax, 7800) - genAmount
}
}

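The new branch caps venus at 7,800 tokens of context (slightly under the 8,000-token maxContextLength in the default preset) before reserving room for the response. A worked example with the default preset values:

// configuredMax = 8000 (preset maxContextLength), genAmount = 300 (preset maxTokens)
// Math.min(8000, 7800) - 300 === 7500 tokens available for the prompt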
3 changes: 3 additions & 0 deletions srv/adapter/agnaistic.ts
@@ -24,6 +24,7 @@ import { AIAdapter, AdapterSetting } from '/common/adapters'
import { AppSchema } from '/common/types'
import { parseStops } from '/common/util'
import { getTextgenCompletion } from './dispatch'
import { handleVenus } from './venus'

export async function getSubscriptionPreset(
user: AppSchema.User,
@@ -376,6 +377,7 @@ export const handlers: { [key in AIAdapter]: ModelAdapter } = {
mancer: handleMancer,
petals: handlePetals,
agnaistic: handleAgnaistic,
venus: handleVenus,
}

export function getHandlers(settings: Partial<AppSchema.GenSettings>) {
@@ -392,6 +394,7 @@ export function getHandlers(settings: Partial<AppSchema.GenSettings>) {
case 'petals':
case 'mancer':
case 'novel':
case 'venus':
return handlers[settings.service]
}

6 changes: 3 additions & 3 deletions srv/adapter/openai.ts
@@ -12,11 +12,11 @@ const baseUrl = `https://api.openai.com`

type Role = 'user' | 'assistant' | 'system'

type CompletionItem = { role: Role; content: string; name?: string }
export type CompletionItem = { role: Role; content: string; name?: string }
type CompletionContent<T> = Array<{ finish_reason: string; index: number } & ({ text: string } | T)>
type Inference = { message: { content: string; role: Role } }
export type Inference = { message: { content: string; role: Role } }

type Completion<T = Inference> = {
export type Completion<T = Inference> = {
id: string
created: number
model: string
3 changes: 2 additions & 1 deletion srv/adapter/openrouter.ts
@@ -41,11 +41,12 @@ export const handleOpenRouter: ModelAdapter = async function* (opts) {
presence_penalty: opts.gen.presencePenalty,
repetition_penalty: opts.gen.repetitionPenalty,
}

if (opts.gen.openRouterModel?.id) {
payload.model = opts.gen.openRouterModel.id
}

const useChat = opts.gen.openRouterModel?.id.startsWith('anthropic')
const useChat = (opts.gen.openRouterModel?.id || '').startsWith('anthropic')
if (useChat) {
const { messages, system } = await createClaudeChatCompletion(opts)
payload.messages = messages
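Optional chaining alone only protects against a missing openRouterModel object; if the stored model has no id at runtime, `?.id.startsWith(...)` still throws. The `|| ''` fallback covers both cases — a minimal sketch with hypothetical data:

const gen: any = { openRouterModel: {} }                                 // model object present, id missing
// gen.openRouterModel?.id.startsWith('anthropic')                       // would throw: reading 'startsWith' of undefined
const useChat = (gen.openRouterModel?.id || '').startsWith('anthropic')  // false, no throw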
115 changes: 115 additions & 0 deletions srv/adapter/venus.ts
@@ -0,0 +1,115 @@
import { ModelAdapter } from './type'
import { decryptText } from '../db/util'
import { sanitise, sanitiseAndTrim, trimResponseV2 } from '../api/chat/common'
import { registerAdapter } from './register'
import { getStoppingStrings } from './prompt'
import { streamCompletion } from './chat-completion'

const venusOptions: Record<string, string> = {
Mars: 'asha',
Mercury: 'mythomax',
}

const modelOptions = Object.entries(venusOptions).map(([label, value]) => ({ label, value }))

export const handleVenus: ModelAdapter = async function* (opts) {
const body = {
model: opts.gen.thirdPartyModel || 'mythomax',
frequency_penalty: opts.gen.frequencyPenalty,
max_tokens: opts.gen.maxTokens,
min_tokens: 0,
template: opts.prompt,
presence_penalty: opts.gen.presencePenalty,
stream: true,
return_raw: true,
temperature: opts.gen.temp,
top_p: opts.gen.topP,
top_k: opts.gen.topK,
stop: getStoppingStrings(opts),
}

const url = `https://inference.chub.ai/prompt`
const key = opts.user.adapterConfig?.venus?.apiKey
if (!key) {
yield { error: `Venus request failed: API key not set` }
return
}

const apiKey = opts.guest ? key : decryptText(key)
opts.log.debug({ ...body, prompt: null }, 'Venus payload')
opts.log.debug(`Prompt:\n${body.template}`)
yield { prompt: body.template }

const headers = {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
}

const iter = streamCompletion(opts.user._id, url, headers, body, 'Venus', opts.log, 'openai')
let accumulated = ''

while (true) {
let generated = await iter.next()

// Both the streaming and non-streaming generators return a full completion and yield errors.
if (generated.done) {
break
}

if (generated.value.error) {
yield { error: generated.value.error }
return
}

// Only the streaming generator yields individual tokens.
if ('token' in generated.value) {
accumulated += generated.value.token

if (opts.gen.streamResponse) {
yield {
partial: sanitiseAndTrim(
accumulated,
body.template,
opts.char,
opts.characters,
opts.members
),
}
}
}
}

const parsed = sanitise(accumulated)
const trimmed = trimResponseV2(parsed, opts.replyAs, opts.members, opts.characters)

yield trimmed || parsed
}

registerAdapter('venus', handleVenus, {
label: 'Venus',
settings: [
{
field: 'url',
label: 'Model',
secret: false,
setting: { type: 'list', options: modelOptions },
preset: true,
},
{
field: 'apiKey',
label: 'API Key',
secret: true,
setting: { type: 'text', placeholder: 'E.g. CHK-SBIX...' },
},
],
options: [
'temp',
'frequencyPenalty',
'presencePenalty',
'systemPrompt',
'gaslight',
'topP',
'topK',
'typicalP',
],
})
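handleVenus follows the same async-generator contract as the other adapters: it yields the rendered prompt, streamed partials when enabled, an error object on failure, and finally the trimmed response string. A consumption sketch (the real dispatcher lives elsewhere in srv/adapter; the send helper and untyped opts are assumptions):

import { handleVenus } from './venus'

async function runVenus(opts: any, send: (partial: string) => void) {
  for await (const event of handleVenus(opts)) {
    if (typeof event === 'string') return event                        // final sanitised + trimmed response
    if ('error' in event && event.error) throw new Error(event.error)  // abort on adapter error
    if ('partial' in event) send(event.partial)                        // stream partials when streamResponse is on
  }
}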
8 changes: 4 additions & 4 deletions web/pages/Chat/chat-detail.css
@@ -1,7 +1,7 @@
.chat-detail {
display: grid;
height: calc(100vh - 48px);
height: calc(100dvh - 48px);
height: calc(100vh);
height: calc(100dvh);
max-height: 100vh;
max-height: 100dvh;
width: 100%;
@@ -11,8 +11,8 @@

.avatar-chat-detail {
display: grid;
height: calc(100vh - 48px);
height: calc(100dvh - 48px);
height: calc(100vh);
height: calc(100dvh);
max-height: 100vh;
max-height: 100dvh;
width: 100%;
6 changes: 5 additions & 1 deletion web/shared/Modal.tsx
@@ -256,5 +256,9 @@ export const HelpModal: Component<{
}

export const RootModal: Component<Props> = (props) => {
return <Modal {...props} />
return (
<Portal>
<Modal {...props} />
</Portal>
)
}
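Wrapping the root modal in Solid's Portal mounts it at the end of document.body (the default mount target) instead of inside the chat layout, so the overlay is no longer clipped or offset by the 100dvh grid containers adjusted elsewhere in this commit. A pattern sketch, assuming Portal comes from solid-js/web and using illustrative class names:

import { Portal } from 'solid-js/web'
import type { Component, JSX } from 'solid-js'

// The overlay escapes any overflow/transform context of its JSX parent,
// because Portal renders its children directly under document.body by default.
const Overlay: Component<{ children: JSX.Element }> = (props) => (
  <Portal>
    <div class="fixed inset-0">{props.children}</div>
  </Portal>
)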
8 changes: 4 additions & 4 deletions web/shared/Mode/mode.scss
@@ -1,6 +1,6 @@
.mode {
height: calc(100vh - 48px);
height: calc(100dvh - 48px);
height: calc(100vh);
height: calc(100dvh);
max-height: 100vh;
max-height: 100dvh;
display: grid;
@@ -15,8 +15,8 @@
}

.mode .pane {
height: calc(100vh - 48px);
height: calc(100dvh - 48px);
height: calc(100vh);
height: calc(100dvh);
max-height: 100vh;
max-height: 100dvh;
overflow-y: auto;
4 changes: 4 additions & 0 deletions web/shared/util.ts
@@ -710,6 +710,10 @@ export function isUsableService(
case 'petals': {
return true
}

case 'venus': {
return !!user?.adapterConfig?.venus?.apiKeySet
}
}

return false
