Skip to content

Commit

Permalink
https://github.com/nbonamy/witsy/issues/33
Browse files Browse the repository at this point in the history
  • Loading branch information
nbonamy committed Dec 28, 2024
1 parent a4f5436 commit bb035d6
Show file tree
Hide file tree
Showing 8 changed files with 144 additions and 7 deletions.
1 change: 1 addition & 0 deletions assets/openrouter.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
9 changes: 9 additions & 0 deletions defaults/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,15 @@
"chat": ""
}
},
"openrouter": {
"models": {
"chat": [],
"image": []
},
"model": {
"chat": ""
}
},
"deepseek": {
"models": {
"chat": [],
Expand Down
28 changes: 25 additions & 3 deletions src/components/EngineLogo.vue
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ import logoDeepSeek from '../../assets/deepseek.svg'
import logoGroq from '../../assets/groq.svg'
// @ts-expect-error svg
import logoCerberas from '../../assets/cerebras.svg'
// @ts-expect-error svg
import logoOpenRouter from '../../assets/openrouter.svg'
const logos: { [key: string]: any } = {
openai: logoOpenAI,
Expand All @@ -32,6 +34,7 @@ const logos: { [key: string]: any } = {
mistralai: logoMistralAI,
google: logoGoogle,
xai: logoXAI,
openrouter: logoOpenRouter,
deepseek: logoDeepSeek,
groq: logoGroq,
cerebras: logoCerberas,
Expand Down Expand Up @@ -106,7 +109,11 @@ const logo = computed(() => logos[props.engine])
}
.logo.grayscale.deepseek {
filter: grayscale() brightness(2.0);
filter: grayscale() invert() brightness(0);
}
.logo.background.openrouter {
background-color: #f0f0ea;
}
.logo.grayscale.cerebras {
Expand All @@ -119,6 +126,7 @@ const logo = computed(() => logos[props.engine])
}
@media (prefers-color-scheme: dark) {
.logo.openai {
filter: invert(1) brightness(0.7);
}
Expand All @@ -140,6 +148,17 @@ const logo = computed(() => logos[props.engine])
.logo.grayscale.xai {
filter: invert(1) brightness(0.7);
}
.logo.grayscale.deepseek {
filter: grayscale() brightness(1.7);
}
.logo.grayscale.openrouter {
filter: invert(1) brightness(0.7);
}
.logo.grayscale.cerebras {
filter: grayscale() brightness(1.3);
}
[data-tint=blue] {
.logo.grayscale.openai {
Expand All @@ -158,14 +177,17 @@ const logo = computed(() => logos[props.engine])
filter: grayscale() brightness(1.2);
}
.logo.grayscale.xai {
filter: invert(1) brightness(0.9);
filter: invert(1) brightness(0.8);
}
.logo.grayscale.groq {
filter: invert(1) brightness(0.9);
filter: invert(1) brightness(0.8);
}
.logo.grayscale.cerebras {
filter: grayscale() brightness(1.85);
}
.logo.grayscale.openrouter {
filter: invert(1) brightness(0.8);
}
}
}
Expand Down
1 change: 1 addition & 0 deletions src/components/EngineSelect.vue
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
<option value="mistralai">MistralAI</option>
<option value="google">Google</option>
<option value="xai">xAI</option>
<option value="openrouter">OpenRouter</option>
<option value="deepseek">DeepSeek</option>
<option value="groq">Groq</option>
<option value="cerebras">Cerebras</option>
Expand Down
9 changes: 7 additions & 2 deletions src/llms/llm.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@

import { Configuration, EngineConfig } from 'types/config'
import { Anthropic, Ollama, MistralAI, Google, Groq, XAI, DeepSeek, Cerebras, LlmEngine, loadAnthropicModels, loadCerebrasModels, loadGoogleModels, loadGroqModels, loadMistralAIModels, loadOllamaModels, loadOpenAIModels, loadXAIModels, hasVisionModels as _hasVisionModels, isVisionModel as _isVisionModel, ModelsList, Model, loadDeepSeekModels } from 'multi-llm-ts'
import { Anthropic, Ollama, MistralAI, Google, Groq, XAI, OpenRouter, DeepSeek, Cerebras, LlmEngine, loadAnthropicModels, loadCerebrasModels, loadGoogleModels, loadGroqModels, loadMistralAIModels, loadOllamaModels, loadOpenAIModels, loadXAIModels, hasVisionModels as _hasVisionModels, isVisionModel as _isVisionModel, ModelsList, Model, loadOpenRouterModels, loadDeepSeekModels } from 'multi-llm-ts'
import { isSpecializedModel as isSpecialAnthropicModel, getFallbackModel as getAnthropicFallbackModel , getComputerInfo } from './anthropic'
import { imageFormats, textFormats } from '../models/attachment'
import { store } from '../services/store'
import OpenAI from './openai'

export const availableEngines = [ 'openai', 'ollama', 'anthropic', 'mistralai', 'google', 'xai', 'deepseek', 'groq', 'cerebras' ]
export const availableEngines = [ 'openai', 'ollama', 'anthropic', 'mistralai', 'google', 'xai', 'openrouter', 'deepseek', 'groq', 'cerebras' ]
export const staticModelsEngines = [ 'anthropic', 'google', 'xai', 'deepseek', 'groq', 'cerebras' ]

export default class LlmFactory {
Expand Down Expand Up @@ -56,6 +56,7 @@ export default class LlmFactory {
if (engine === 'mistralai') return MistralAI.isConfigured(this.config.engines.mistralai)
if (engine === 'ollama') return Ollama.isConfigured(this.config.engines.ollama)
if (engine === 'openai') return OpenAI.isConfigured(this.config.engines.openai)
if (engine === 'openrouter') return OpenRouter.isConfigured(this.config.engines.openrouter)
if (engine === 'xai') return XAI.isConfigured(this.config.engines.xai)
return false
}
Expand All @@ -69,6 +70,7 @@ export default class LlmFactory {
if (engine === 'mistralai') return MistralAI.isReady(this.config.engines.mistralai, this.config.engines.mistralai?.models)
if (engine === 'ollama') return Ollama.isReady(this.config.engines.ollama, this.config.engines.ollama?.models)
if (engine === 'openai') return OpenAI.isReady(this.config.engines.openai, this.config.engines.openai?.models)
if (engine === 'openrouter') return OpenRouter.isReady(this.config.engines.openrouter, this.config.engines.openrouter?.models)

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.

Check failure on line 73 in src/llms/llm.ts

View workflow job for this annotation

GitHub Actions / build

Unhandled error

TypeError: Cannot read properties of undefined (reading 'isReady') ❯ LlmFactory.isEngineReady src/llms/llm.ts:73:52 ❯ Proxy.load src/services/store.ts:62:24 ❯ setup src/screens/Main.vue:36:7 ❯ callWithErrorHandling node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:200:19 ❯ setupStatefulComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7848:25 ❯ setupComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:7809:36 ❯ mountComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5159:7 ❯ processComponent node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5125:9 ❯ patch node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:4654:11 ❯ ReactiveEffect.componentUpdateFn [as fn] node_modules/@vue/runtime-core/dist/runtime-core.cjs.js:5269:11 This error originated in "tests/screens/main.test.ts" test file. It doesn't mean the error was thrown inside the file itself, but while it was running. The latest test that might've caused the error is "Rename chat". It might mean one of the following: - The error was thrown, while Vitest was running this test. - If the error occurred after the test had been completed, this was the last documented test before it was thrown.
if (engine === 'xai') return XAI.isReady(this.config.engines.xai, this.config.engines.xai?.models)
return false
}
Expand All @@ -84,6 +86,7 @@ export default class LlmFactory {
if (engine === 'mistralai') return new MistralAI(this.config.engines.mistralai)
if (engine === 'ollama') return new Ollama(this.config.engines.ollama)
if (engine === 'openai') return new OpenAI(this.config.engines.openai)
if (engine === 'openrouter') return new OpenRouter(this.config.engines.openrouter)
if (engine === 'xai') return new XAI(this.config.engines.xai)

// fallback
Expand Down Expand Up @@ -145,6 +148,8 @@ export default class LlmFactory {
models = await loadCerebrasModels(this.config.engines.cerebras)
} else if (engine === 'xai') {
models = await loadXAIModels(this.config.engines.xai)
} else if (engine === 'openrouter') {
models = await loadOpenRouterModels(this.config.engines.openrouter)
} else if (engine === 'deepseek') {
models = await loadDeepSeekModels(this.config.engines.deepseek)
}
Expand Down
4 changes: 2 additions & 2 deletions src/services/generator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,13 +100,13 @@ export default class Generator {
if (error.status === 401 || message.includes('401') || message.includes('apikey')) {
response.setText('You need to enter your API key in the Models tab of <a href="#settings_models">Settings</a> in order to chat.')
rc = false
} else if (error.status === 400 && (message.includes('credit') || message.includes('balance'))) {
} else if ((error.status === 400 || error.status === 402) && (message.includes('credit') || message.includes('balance'))) {
response.setText('Sorry, it seems you have run out of credits. Check the balance of your LLM provider account.')
rc = false
} else if (error.status === 400 && (message.includes('context length') || message.includes('too long'))) {
response.setText('Sorry, it seems this message exceeds this model context length. Try to shorten your prompt or try another model.')
rc = false
} else if (error.status === 400 && (message.includes('function call') || message.includes('tools'))) {
} else if ((error.status === 400 || error.status === 404) && (message.includes('function call') || message.includes('tools') || message.includes('tool use'))) {
if (llm.plugins.length > 0) {
console.log('Model does not support function calling: removing tool and retrying')
llm.clearPlugins()
Expand Down
3 changes: 3 additions & 0 deletions src/settings/SettingsLLM.vue
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import SettingsGroq from './SettingsGroq.vue'
import SettingsCerberas from './SettingsCerebras.vue'
import SettingsXAI from './SettingsXAI.vue'
import SettingsDeepSeek from './SettingsDeepSeek.vue'
import SettingsOpenRouter from './SettingsOpenRouter.vue'
const currentEngine = ref(availableEngines[0])
const engineSettings = ref(null)
Expand All @@ -44,6 +45,7 @@ const engines = computed(() => {
mistralai: 'Mistral AI',
google: 'Google',
xai: 'xAI',
openrouter: 'OpenRouter',
deepseek: 'DeepSeek',
groq: 'Groq',
cerebras: 'Cerebras',
Expand All @@ -60,6 +62,7 @@ const currentView = computed(() => {
if (currentEngine.value == 'google') return SettingsGoogle
if (currentEngine.value == 'xai') return SettingsXAI
if (currentEngine.value == 'deepseek') return SettingsDeepSeek
if (currentEngine.value == 'openrouter') return SettingsOpenRouter
if (currentEngine.value == 'groq') return SettingsGroq
if (currentEngine.value == 'cerebras') return SettingsCerberas
})
Expand Down
96 changes: 96 additions & 0 deletions src/settings/SettingsOpenRouter.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@

<template>
  <!-- OpenRouter engine settings panel: API key entry plus chat model selection -->
  <div>
    <div class="group">
      <label>API key</label>
      <div class="subgroup">
        <!-- onKeyChange auto-fetches the model list the first time a key is entered -->
        <InputObfuscated v-model="apiKey" @blur="onKeyChange" />
        <a href="https://openrouter.ai/settings/keys" target="_blank">Get your API key</a>
      </div>
    </div>
    <div class="group">
      <label>Chat model</label>
      <div class="subgroup">
        <!-- disabled until a model list has been fetched via refresh or key entry -->
        <select v-model="chat_model" :disabled="chat_models.length == 0" @change="save">
          <option v-for="model in chat_models" :key="model.id" :value="model.id">
            {{ model.name }}
          </option>
        </select>
        <a href="https://openrouter.ai/models" target="_blank">More about OpenRouter models</a>
      </div>
      <!-- label doubles as transient status: Refresh / Refreshing… / Done! / Error! -->
      <button @click.prevent="onRefresh">{{ refreshLabel }}</button>
    </div>
  </div>
</template>

<script setup lang="ts">
import { ref } from 'vue'
import { store } from '../services/store'
import LlmFactory from '../llms/llm'
import InputObfuscated from '../components/InputObfuscated.vue'
// Reactive form state for the OpenRouter settings panel.
// Initialized to empty strings/arrays (not null) so string operations
// such as apiKey.value.length are safe even before load() has run.
const apiKey = ref('')
const refreshLabel = ref('Refresh')
const chat_model = ref('')
const chat_models = ref([])

// Populate the form from the persisted configuration.
// Optional chaining guards against configurations saved before the
// 'openrouter' engine entry existed in defaults/settings.json.
const load = () => {
  apiKey.value = store.config.engines.openrouter?.apiKey || ''
  chat_models.value = store.config.engines.openrouter?.models?.chat || []
  chat_model.value = store.config.engines.openrouter?.model?.chat || ''
}

// Refresh button handler: show progress immediately, then fetch after a
// short delay so the label repaint is visible before the network call.
const onRefresh = async () => {
  refreshLabel.value = 'Refreshing…'
  setTimeout(() => getModels(), 500)
}

// Flash a transient status message on the refresh button, then restore it.
const setEphemeralRefreshLabel = (text: string) => {
  refreshLabel.value = text
  setTimeout(() => refreshLabel.value = 'Refresh', 2000)
}

// Fetch the OpenRouter model list and reload the form from the store.
// On failure the local model list is cleared and an error is flashed.
const getModels = async () => {

  // load
  const llmFactory = new LlmFactory(store.config)
  const success = await llmFactory.loadModels('openrouter')
  if (!success) {
    chat_models.value = []
    setEphemeralRefreshLabel('Error!')
    return
  }

  // reload
  load()

  // done
  setEphemeralRefreshLabel('Done!')
}

// Blur handler for the key field: the first time a key is entered
// (no models fetched yet), persist it and auto-fetch the model list;
// always save afterwards.
const onKeyChange = () => {
  if (chat_models.value.length === 0 && apiKey.value.length > 0) {
    store.config.engines.openrouter.apiKey = apiKey.value
    getModels()
  }
  save()
}

// Persist the current form values to settings.
// NOTE(review): unlike load(), this assumes store.config.engines.openrouter
// exists (seeded by defaults/settings.json) — confirm before removing that default.
const save = () => {
  store.config.engines.openrouter.apiKey = apiKey.value
  store.config.engines.openrouter.model.chat = chat_model.value
  store.saveSettings()
}

defineExpose({ load })
</script>

<style scoped>
@import '../../css/dialog.css';
@import '../../css/tabs.css';
@import '../../css/form.css';
@import '../../css/panel.css';
</style>

0 comments on commit bb035d6

Please sign in to comment.