Skip to content

Commit 5447530

Browse files
committed
Merge branch 'feat/deepseek-provider'
2 parents ed6d46a + 588e68e commit 5447530

File tree

6 files changed

+79
-0
lines changed

6 files changed

+79
-0
lines changed

sample.config.toml

+3
Original file line number · Diff line number · Diff line change
@@ -22,5 +22,8 @@ MODEL_NAME = ""
2222
[MODELS.OLLAMA]
2323
API_URL = "" # Ollama API URL - http://host.docker.internal:11434
2424

25+
[MODELS.DEEPSEEK]
26+
API_KEY = ""
27+
2528
[API_ENDPOINTS]
2629
SEARXNG = "" # SearxNG API URL - http://localhost:32768

src/app/api/config/route.ts

+5
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import {
77
getGroqApiKey,
88
getOllamaApiEndpoint,
99
getOpenaiApiKey,
10+
getDeepseekApiKey,
1011
updateConfig,
1112
} from '@/lib/config';
1213
import {
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
5354
config['anthropicApiKey'] = getAnthropicApiKey();
5455
config['groqApiKey'] = getGroqApiKey();
5556
config['geminiApiKey'] = getGeminiApiKey();
57+
config['deepseekApiKey'] = getDeepseekApiKey();
5658
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
5759
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
5860
config['customOpenaiModelName'] = getCustomOpenaiModelName();
@@ -88,6 +90,9 @@ export const POST = async (req: Request) => {
8890
OLLAMA: {
8991
API_URL: config.ollamaApiUrl,
9092
},
93+
DEEPSEEK: {
94+
API_KEY: config.deepseekApiKey,
95+
},
9196
CUSTOM_OPENAI: {
9297
API_URL: config.customOpenaiApiUrl,
9398
API_KEY: config.customOpenaiApiKey,

src/app/settings/page.tsx

+20
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ interface SettingsType {
2020
anthropicApiKey: string;
2121
geminiApiKey: string;
2222
ollamaApiUrl: string;
23+
deepseekApiKey: string;
2324
customOpenaiApiKey: string;
2425
customOpenaiApiUrl: string;
2526
customOpenaiModelName: string;
@@ -838,6 +839,25 @@ const Page = () => {
838839
onSave={(value) => saveConfig('geminiApiKey', value)}
839840
/>
840841
</div>
842+
843+
<div className="flex flex-col space-y-1">
844+
<p className="text-black/70 dark:text-white/70 text-sm">
845+
Deepseek API Key
846+
</p>
847+
<Input
848+
type="text"
849+
placeholder="Deepseek API Key"
850+
value={config.deepseekApiKey}
851+
isSaving={savingStates['deepseekApiKey']}
852+
onChange={(e) => {
853+
setConfig((prev) => ({
854+
...prev!,
855+
deepseekApiKey: e.target.value,
856+
}));
857+
}}
858+
onSave={(value) => saveConfig('deepseekApiKey', value)}
859+
/>
860+
</div>
841861
</div>
842862
</SettingsSection>
843863
</div>

src/lib/config.ts

+5
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,9 @@ interface Config {
2525
OLLAMA: {
2626
API_URL: string;
2727
};
28+
DEEPSEEK: {
29+
API_KEY: string;
30+
};
2831
CUSTOM_OPENAI: {
2932
API_URL: string;
3033
API_KEY: string;
@@ -63,6 +66,8 @@ export const getSearxngApiEndpoint = () =>
6366

6467
export const getOllamaApiEndpoint = () => loadConfig().MODELS.OLLAMA.API_URL;
6568

69+
export const getDeepseekApiKey = () => loadConfig().MODELS.DEEPSEEK.API_KEY;
70+
6671
export const getCustomOpenaiApiKey = () =>
6772
loadConfig().MODELS.CUSTOM_OPENAI.API_KEY;
6873

src/lib/providers/deepseek.ts

+44
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
import { ChatOpenAI } from '@langchain/openai';
2+
import { getDeepseekApiKey } from '../config';
3+
import { ChatModel } from '.';
4+
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
5+
6+
const deepseekChatModels: Record<string, string>[] = [
7+
{
8+
displayName: 'Deepseek Chat (Deepseek V3)',
9+
key: 'deepseek-chat',
10+
},
11+
{
12+
displayName: 'Deepseek Reasoner (Deepseek R1)',
13+
key: 'deepseek-reasoner',
14+
},
15+
];
16+
17+
export const loadDeepseekChatModels = async () => {
18+
const deepseekApiKey = getDeepseekApiKey();
19+
20+
if (!deepseekApiKey) return {};
21+
22+
try {
23+
const chatModels: Record<string, ChatModel> = {};
24+
25+
deepseekChatModels.forEach((model) => {
26+
chatModels[model.key] = {
27+
displayName: model.displayName,
28+
model: new ChatOpenAI({
29+
openAIApiKey: deepseekApiKey,
30+
modelName: model.key,
31+
temperature: 0.7,
32+
configuration: {
33+
baseURL: 'https://api.deepseek.com',
34+
},
35+
}) as unknown as BaseChatModel,
36+
};
37+
});
38+
39+
return chatModels;
40+
} catch (err) {
41+
console.error(`Error loading Deepseek models: ${err}`);
42+
return {};
43+
}
44+
};

src/lib/providers/index.ts

+2
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
1212
import { loadAnthropicChatModels } from './anthropic';
1313
import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
1414
import { loadTransformersEmbeddingsModels } from './transformers';
15+
import { loadDeepseekChatModels } from './deepseek';
1516

1617
export interface ChatModel {
1718
displayName: string;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
3233
groq: loadGroqChatModels,
3334
anthropic: loadAnthropicChatModels,
3435
gemini: loadGeminiChatModels,
36+
deepseek: loadDeepseekChatModels,
3537
};
3638

3739
export const embeddingModelProviders: Record<

0 commit comments

Comments (0)