deepseek support
nbonamy committed Dec 26, 2024
1 parent 9f5df95 · commit f34d9c5
Showing 7 changed files with 122 additions and 8 deletions.
1 change: 1 addition & 0 deletions assets/deepseek.svg
(SVG asset; not rendered in the diff view.)
9 changes: 9 additions & 0 deletions defaults/settings.json
@@ -167,6 +167,15 @@
"chat": ""
}
},
"deepseek": {
"models": {
"chat": [],
"image": []
},
"model": {
"chat": ""
}
},
"cerebras": {
"models": {
"chat": [],
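The new block mirrors the other per-engine defaults and has to line up with the `EngineConfig` type that `src/llms/llm.ts` imports from `types/config.d`. That type is not part of this diff, so the sketch below is an assumption about the subset of fields the deepseek entry fills in:

```ts
import type { Model } from 'multi-llm-ts'

// Assumed subset of the EngineConfig type from types/config.d (not shown in
// this diff): the deepseek defaults above start with empty model lists and
// no selected chat model.
interface EngineConfigSketch {
  apiKey?: string
  models: { chat: Model[]; image: Model[] }
  model: { chat: string }
}

// The defaults entry added above, expressed against that assumed shape.
const deepseekDefaults: EngineConfigSketch = {
  models: { chat: [], image: [] },
  model: { chat: '' },
}
```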
8 changes: 4 additions & 4 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -103,7 +103,7 @@
"markdown-it-mark": "^4.0.0",
"minimatch": "^10.0.1",
"mitt": "^3.0.1",
"multi-llm-ts": "^2.4.11",
"multi-llm-ts": "^2.5.0",
"nestor-client": "^0.3.1",
"number-flip-animation": "github:nbonamy/number-flip-animation",
"officeparser": "^5.1.1",
11 changes: 8 additions & 3 deletions src/llms/llm.ts
@@ -1,13 +1,13 @@

import { Configuration, EngineConfig } from 'types/config.d'
import { Anthropic, Ollama, MistralAI, Google, Groq, XAI, Cerebras, LlmEngine, loadAnthropicModels, loadCerebrasModels, loadGoogleModels, loadGroqModels, loadMistralAIModels, loadOllamaModels, loadOpenAIModels, loadXAIModels, hasVisionModels as _hasVisionModels, isVisionModel as _isVisionModel, ModelsList, Model } from 'multi-llm-ts'
import { Anthropic, Ollama, MistralAI, Google, Groq, XAI, DeepSeek, Cerebras, LlmEngine, loadAnthropicModels, loadCerebrasModels, loadGoogleModels, loadGroqModels, loadMistralAIModels, loadOllamaModels, loadOpenAIModels, loadXAIModels, hasVisionModels as _hasVisionModels, isVisionModel as _isVisionModel, ModelsList, Model, loadDeepSeekModels } from 'multi-llm-ts'
import { isSpecializedModel as isSpecialAnthropicModel, getFallbackModel as getAnthropicFallbackModel , getComputerInfo } from './anthropic'
import { imageFormats, textFormats } from '../models/attachment'
import { store } from '../services/store'
import OpenAI from './openai'

export const availableEngines = [ 'openai', 'ollama', 'anthropic', 'mistralai', 'google', 'xai', 'groq', 'cerebras' ]
export const staticModelsEngines = [ 'anthropic', 'google', 'xai', 'groq', 'cerebras' ]
export const availableEngines = [ 'openai', 'ollama', 'anthropic', 'mistralai', 'google', 'xai', 'deepseek', 'groq', 'cerebras' ]
export const staticModelsEngines = [ 'anthropic', 'google', 'xai', 'deepseek', 'groq', 'cerebras' ]

export default class LlmFactory {

@@ -50,6 +50,7 @@ export default class LlmFactory {
isEngineConfigured = (engine: string): boolean => {
if (engine === 'anthropic') return Anthropic.isConfigured(this.config.engines.anthropic)
if (engine === 'cerebras') return Cerebras.isConfigured(this.config.engines.cerebras)
if (engine === 'deepseek') return DeepSeek.isConfigured(this.config.engines.deepseek)
if (engine === 'google') return Google.isConfigured(this.config.engines.google)
if (engine === 'groq') return Groq.isConfigured(this.config.engines.groq)
if (engine === 'mistralai') return MistralAI.isConfigured(this.config.engines.mistralai)
@@ -62,6 +63,7 @@ export default class LlmFactory {
isEngineReady = (engine: string): boolean => {
if (engine === 'anthropic') return Anthropic.isReady(this.config.engines.anthropic, this.config.engines.anthropic?.models)
if (engine === 'cerebras') return Cerebras.isReady(this.config.engines.cerebras, this.config.engines.cerebras?.models)
if (engine === 'deepseek') return DeepSeek.isReady(this.config.engines.deepseek, this.config.engines.deepseek?.models)
if (engine === 'google') return Google.isReady(this.config.engines.google, this.config.engines.google?.models)
if (engine === 'groq') return Groq.isReady(this.config.engines.groq, this.config.engines.groq?.models)
if (engine === 'mistralai') return MistralAI.isReady(this.config.engines.mistralai, this.config.engines.mistralai?.models)
@@ -76,6 +78,7 @@ export default class LlmFactory {
// select
if (engine === 'anthropic') return new Anthropic(this.config.engines.anthropic, getComputerInfo())
if (engine === 'cerebras') return new Cerebras(this.config.engines.cerebras)
if (engine === 'deepseek') return new DeepSeek(this.config.engines.deepseek)
if (engine === 'google') return new Google(this.config.engines.google)
if (engine === 'groq') return new Groq(this.config.engines.groq)
if (engine === 'mistralai') return new MistralAI(this.config.engines.mistralai)
@@ -142,6 +145,8 @@ export default class LlmFactory {
models = await loadCerebrasModels(this.config.engines.cerebras)
} else if (engine === 'xai') {
models = await loadXAIModels(this.config.engines.xai)
} else if (engine === 'deepseek') {
models = await loadDeepSeekModels(this.config.engines.deepseek)
}

// needed
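Taken together, these changes register DeepSeek as another static-models engine and route configuration, readiness checks and model loading through the same factory methods as the other providers. A minimal usage sketch, using only method names visible in this diff (`isEngineConfigured`, `loadModels`, `isEngineReady`) and the import paths used by the settings component further down; it assumes an API key is already present in the config:

```ts
// Sketch: exercising the new DeepSeek wiring through LlmFactory.
import LlmFactory from '../llms/llm'
import { store } from '../services/store'

const factory = new LlmFactory(store.config)

if (factory.isEngineConfigured('deepseek')) {
  // loadModels('deepseek') delegates to loadDeepSeekModels from multi-llm-ts
  const ok = await factory.loadModels('deepseek')
  console.log(ok, factory.isEngineReady('deepseek'))
}
```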
96 changes: 96 additions & 0 deletions src/settings/SettingsDeepSeek.vue
@@ -0,0 +1,96 @@

<template>
<div>
<div class="group">
<label>API key</label>
<div class="subgroup">
<InputObfuscated v-model="apiKey" @blur="onKeyChange" />
<a href="https://platform.deepseek.com/api_keys" target="_blank">Get your API key</a>
</div>
</div>
<div class="group">
<label>Chat model</label>
<div class="subgroup">
<select v-model="chat_model" :disabled="chat_models.length == 0" @change="save">
<option v-for="model in chat_models" :key="model.id" :value="model.id">
{{ model.name }}
</option>
</select>
<a href="https://api-docs.deepseek.com/quick_start/pricing" target="_blank">More about DeepSeek models</a>
</div>
<!-- <button style="visibility: hidden;" @click.prevent="onRefresh">{{ refreshLabel }}</button> -->
</div>
</div>
</template>

<script setup lang="ts">
import { ref } from 'vue'
import { store } from '../services/store'
import LlmFactory from '../llms/llm'
import InputObfuscated from '../components/InputObfuscated.vue'
const apiKey = ref(null)
const refreshLabel = ref('Refresh')
const chat_model = ref(null)
const chat_models = ref([])
const load = () => {
apiKey.value = store.config.engines.deepseek?.apiKey || ''
chat_models.value = store.config.engines.deepseek?.models?.chat || []
chat_model.value = store.config.engines.deepseek?.model?.chat || ''
}
// const onRefresh = async () => {
// refreshLabel.value = 'Refreshing…'
// setTimeout(() => getModels(), 500)
// }
const setEphemeralRefreshLabel = (text: string) => {
// refreshLabel.value = text
// setTimeout(() => refreshLabel.value = 'Refresh', 2000)
}
const getModels = async () => {
// load
const llmFactory = new LlmFactory(store.config)
let success = await llmFactory.loadModels('deepseek')
if (!success) {
chat_models.value = []
setEphemeralRefreshLabel('Error!')
return
}
// reload
load()
// done
setEphemeralRefreshLabel('Done!')
}
const onKeyChange = () => {
if (chat_models.value.length === 0 && apiKey.value.length > 0) {
store.config.engines.deepseek.apiKey = apiKey.value
getModels()
}
save()
}
const save = () => {
store.config.engines.deepseek.apiKey = apiKey.value
store.config.engines.deepseek.model.chat = chat_model.value
store.saveSettings()
}
defineExpose({ load })
</script>

<style scoped>
@import '../../css/dialog.css';
@import '../../css/tabs.css';
@import '../../css/form.css';
@import '../../css/panel.css';
</style>
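For orientation, the flow in this component is: entering a key fires `onKeyChange`, which stores the key and calls `getModels()` (and through it `llmFactory.loadModels('deepseek')`); `load()` then repopulates the chat-model dropdown and `save()` persists the selection. A sketch of what `store.config.engines.deepseek` might hold afterwards — the `deepseek-chat` id is an illustrative placeholder, not something this commit pins down:

```ts
// Illustrative only: possible shape of the deepseek engine entry after a key
// has been saved and models fetched. The model id/name are placeholders.
const deepseekEngineEntry = {
  apiKey: 'sk-...',
  models: {
    chat: [{ id: 'deepseek-chat', name: 'deepseek-chat' }],
    image: [],
  },
  model: {
    chat: 'deepseek-chat',
  },
}
```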
3 changes: 3 additions & 0 deletions src/settings/SettingsLLM.vue
@@ -26,6 +26,7 @@ import SettingsGoogle from './SettingsGoogle.vue'
import SettingsGroq from './SettingsGroq.vue'
import SettingsCerberas from './SettingsCerebras.vue'
import SettingsXAI from './SettingsXAI.vue'
import SettingsDeepSeek from './SettingsDeepSeek.vue'
const currentEngine = ref(availableEngines[0])
const engineSettings = ref(null)
@@ -43,6 +44,7 @@ const engines = computed(() => {
mistralai: 'Mistral AI',
google: 'Google',
xai: 'xAI',
deepseek: 'DeepSeek',
groq: 'Groq',
cerebras: 'Cerebras',
}[engine],
@@ -57,6 +59,7 @@ const currentView = computed(() => {
if (currentEngine.value == 'mistralai') return SettingsMistralAI
if (currentEngine.value == 'google') return SettingsGoogle
if (currentEngine.value == 'xai') return SettingsXAI
if (currentEngine.value == 'deepseek') return SettingsDeepSeek
if (currentEngine.value == 'groq') return SettingsGroq
if (currentEngine.value == 'cerebras') return SettingsCerberas
})
