File tree: 6 files changed, +79 −0 lines changed
lines changed Original file line number Diff line number Diff line change @@ -22,5 +22,8 @@ MODEL_NAME = ""
22
22
[MODELS .OLLAMA ]
23
23
API_URL = " " # Ollama API URL - http://host.docker.internal:11434
24
24
25
+ [MODELS .DEEPSEEK ]
26
+ API_KEY = " "
27
+
25
28
[API_ENDPOINTS ]
26
29
SEARXNG = " " # SearxNG API URL - http://localhost:32768
Original file line number Diff line number Diff line change 7
7
getGroqApiKey ,
8
8
getOllamaApiEndpoint ,
9
9
getOpenaiApiKey ,
10
+ getDeepseekApiKey ,
10
11
updateConfig ,
11
12
} from '@/lib/config' ;
12
13
import {
@@ -53,6 +54,7 @@ export const GET = async (req: Request) => {
53
54
config [ 'anthropicApiKey' ] = getAnthropicApiKey ( ) ;
54
55
config [ 'groqApiKey' ] = getGroqApiKey ( ) ;
55
56
config [ 'geminiApiKey' ] = getGeminiApiKey ( ) ;
57
+ config [ 'deepseekApiKey' ] = getDeepseekApiKey ( ) ;
56
58
config [ 'customOpenaiApiUrl' ] = getCustomOpenaiApiUrl ( ) ;
57
59
config [ 'customOpenaiApiKey' ] = getCustomOpenaiApiKey ( ) ;
58
60
config [ 'customOpenaiModelName' ] = getCustomOpenaiModelName ( ) ;
@@ -88,6 +90,9 @@ export const POST = async (req: Request) => {
88
90
OLLAMA : {
89
91
API_URL : config . ollamaApiUrl ,
90
92
} ,
93
+ DEEPSEEK : {
94
+ API_KEY : config . deepseekApiKey ,
95
+ } ,
91
96
CUSTOM_OPENAI : {
92
97
API_URL : config . customOpenaiApiUrl ,
93
98
API_KEY : config . customOpenaiApiKey ,
Original file line number Diff line number Diff line change @@ -20,6 +20,7 @@ interface SettingsType {
20
20
anthropicApiKey : string ;
21
21
geminiApiKey : string ;
22
22
ollamaApiUrl : string ;
23
+ deepseekApiKey : string ;
23
24
customOpenaiApiKey : string ;
24
25
customOpenaiApiUrl : string ;
25
26
customOpenaiModelName : string ;
@@ -838,6 +839,25 @@ const Page = () => {
838
839
onSave = { ( value ) => saveConfig ( 'geminiApiKey' , value ) }
839
840
/>
840
841
</ div >
842
+
843
+ < div className = "flex flex-col space-y-1" >
844
+ < p className = "text-black/70 dark:text-white/70 text-sm" >
845
+ Deepseek API Key
846
+ </ p >
847
+ < Input
848
+ type = "text"
849
+ placeholder = "Deepseek API Key"
850
+ value = { config . deepseekApiKey }
851
+ isSaving = { savingStates [ 'deepseekApiKey' ] }
852
+ onChange = { ( e ) => {
853
+ setConfig ( ( prev ) => ( {
854
+ ...prev ! ,
855
+ deepseekApiKey : e . target . value ,
856
+ } ) ) ;
857
+ } }
858
+ onSave = { ( value ) => saveConfig ( 'deepseekApiKey' , value ) }
859
+ />
860
+ </ div >
841
861
</ div >
842
862
</ SettingsSection >
843
863
</ div >
Original file line number Diff line number Diff line change @@ -25,6 +25,9 @@ interface Config {
25
25
OLLAMA : {
26
26
API_URL : string ;
27
27
} ;
28
+ DEEPSEEK : {
29
+ API_KEY : string ;
30
+ } ;
28
31
CUSTOM_OPENAI : {
29
32
API_URL : string ;
30
33
API_KEY : string ;
@@ -63,6 +66,8 @@ export const getSearxngApiEndpoint = () =>
63
66
64
67
export const getOllamaApiEndpoint = ( ) => loadConfig ( ) . MODELS . OLLAMA . API_URL ;
65
68
69
+ export const getDeepseekApiKey = ( ) => loadConfig ( ) . MODELS . DEEPSEEK . API_KEY ;
70
+
66
71
export const getCustomOpenaiApiKey = ( ) =>
67
72
loadConfig ( ) . MODELS . CUSTOM_OPENAI . API_KEY ;
68
73
Original file line number Diff line number Diff line change
1
+ import { ChatOpenAI } from '@langchain/openai' ;
2
+ import { getDeepseekApiKey } from '../config' ;
3
+ import { ChatModel } from '.' ;
4
+ import { BaseChatModel } from '@langchain/core/language_models/chat_models' ;
5
+
6
+ const deepseekChatModels : Record < string , string > [ ] = [
7
+ {
8
+ displayName : 'Deepseek Chat (Deepseek V3)' ,
9
+ key : 'deepseek-chat' ,
10
+ } ,
11
+ {
12
+ displayName : 'Deepseek Reasoner (Deepseek R1)' ,
13
+ key : 'deepseek-reasoner' ,
14
+ } ,
15
+ ] ;
16
+
17
+ export const loadDeepseekChatModels = async ( ) => {
18
+ const deepseekApiKey = getDeepseekApiKey ( ) ;
19
+
20
+ if ( ! deepseekApiKey ) return { } ;
21
+
22
+ try {
23
+ const chatModels : Record < string , ChatModel > = { } ;
24
+
25
+ deepseekChatModels . forEach ( ( model ) => {
26
+ chatModels [ model . key ] = {
27
+ displayName : model . displayName ,
28
+ model : new ChatOpenAI ( {
29
+ openAIApiKey : deepseekApiKey ,
30
+ modelName : model . key ,
31
+ temperature : 0.7 ,
32
+ configuration : {
33
+ baseURL : 'https://api.deepseek.com' ,
34
+ } ,
35
+ } ) as unknown as BaseChatModel ,
36
+ } ;
37
+ } ) ;
38
+
39
+ return chatModels ;
40
+ } catch ( err ) {
41
+ console . error ( `Error loading Deepseek models: ${ err } ` ) ;
42
+ return { } ;
43
+ }
44
+ } ;
Original file line number Diff line number Diff line change @@ -12,6 +12,7 @@ import { loadGroqChatModels } from './groq';
12
12
import { loadAnthropicChatModels } from './anthropic' ;
13
13
import { loadGeminiChatModels , loadGeminiEmbeddingModels } from './gemini' ;
14
14
import { loadTransformersEmbeddingsModels } from './transformers' ;
15
+ import { loadDeepseekChatModels } from './deepseek' ;
15
16
16
17
export interface ChatModel {
17
18
displayName : string ;
@@ -32,6 +33,7 @@ export const chatModelProviders: Record<
32
33
groq : loadGroqChatModels ,
33
34
anthropic : loadAnthropicChatModels ,
34
35
gemini : loadGeminiChatModels ,
36
+ deepseek : loadDeepseekChatModels ,
35
37
} ;
36
38
37
39
export const embeddingModelProviders : Record <
You can’t perform that action at this time.
0 commit comments