Skip to content

Commit 1ff1aef

Browse files
committed
✨ feat: support closing OpenAI
1 parent c398063 commit 1ff1aef

File tree

5 files changed

+27
-14
lines changed

5 files changed

+27
-14
lines changed

src/app/settings/llm/OpenAI/index.tsx

+1-6
Original file line number | Diff line number | Diff line change
@@ -4,12 +4,7 @@ import { memo } from 'react';
44
import ProviderConfig from '../components/ProviderConfig';
55

66
const OpenAIProvider = memo(() => (
7-
<ProviderConfig
8-
canDeactivate={false}
9-
provider={'openai'}
10-
showEndpoint
11-
title={<OpenAI.Combine size={24} />}
12-
/>
7+
<ProviderConfig provider={'openai'} showEndpoint title={<OpenAI.Combine size={24} />} />
138
));
149

1510
export default OpenAIProvider;

src/config/server/provider.ts

+22
Original file line number | Diff line number | Diff line change
@@ -83,6 +83,10 @@ declare global {
8383
* @deprecated
8484
*/
8585
OLLAMA_CUSTOM_MODELS?: string;
86+
/**
87+
* @deprecated
88+
*/
89+
OPENROUTER_CUSTOM_MODELS?: string;
8690
}
8791
}
8892
}
@@ -119,6 +123,24 @@ export const getProviderConfig = () => {
119123
regions = process.env.OPENAI_FUNCTION_REGIONS.split(',');
120124
}
121125

126+
if (process.env.CUSTOM_MODELS) {
127+
console.warn(
128+
'DEPRECATED: `CUSTOM_MODELS` is deprecated, please use `OPENAI_MODEL_LIST` instead, we will remove `CUSTOM_MODELS` in the LobeChat 1.0',
129+
);
130+
}
131+
132+
if (process.env.OLLAMA_CUSTOM_MODELS) {
133+
console.warn(
134+
'DEPRECATED: `OLLAMA_CUSTOM_MODELS` is deprecated, please use `OLLAMA_MODEL_LIST` instead, we will remove `OLLAMA_CUSTOM_MODELS` in the LobeChat 1.0',
135+
);
136+
}
137+
138+
if (process.env.OPENROUTER_CUSTOM_MODELS) {
139+
console.warn(
140+
'DEPRECATED: `OPENROUTER_CUSTOM_MODELS` is deprecated, please use `OPENROUTER_MODEL_LIST` instead, we will remove `OPENROUTER_CUSTOM_MODELS` in the LobeChat 1.0',
141+
);
142+
}
143+
122144
return {
123145
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
124146

src/const/settings/index.ts

+1-1
Original file line number | Diff line number | Diff line change
@@ -108,7 +108,7 @@ export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = {
108108
},
109109
openai: {
110110
apiKey: '',
111-
enabled: false,
111+
enabled: true,
112112
enabledModels: filterEnabledModels(OpenAIProvider),
113113
},
114114
openrouter: {

src/store/global/slices/settings/selectors/modelConfig.ts

+2-7
Original file line number | Diff line number | Diff line change
@@ -14,13 +14,8 @@ const providerConfig = (provider: string) => (s: GlobalStore) =>
1414
| GeneralModelProviderConfig
1515
| undefined;
1616

17-
const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) => {
18-
// TODO: we need to migrate the 'openAI' key to 'openai'
19-
// @ts-ignore
20-
if (provider === 'openai') return true;
21-
22-
return currentSettings(s).languageModel[provider]?.enabled || false;
23-
};
17+
const providerEnabled = (provider: GlobalLLMProviderKey) => (s: GlobalStore) =>
18+
currentSettings(s).languageModel[provider]?.enabled || false;
2419

2520
const providerEnableModels = (provider: string) => (s: GlobalStore) => {
2621
if (!providerConfig(provider)(s)?.enabledModels) return;

src/store/global/slices/settings/selectors/modelProvider.ts

+1
Original file line number | Diff line number | Diff line change
@@ -43,6 +43,7 @@ const serverProviderModelCards =
4343
* define all the model list of providers
4444
*/
4545
const providerModelList = (s: GlobalStore): ModelProviderCard[] => {
46+
// if the chat model is config in the server side, use the server side model cards
4647
const openaiChatModels = serverProviderModelCards('openai')(s);
4748
const ollamaChatModels = serverProviderModelCards('ollama')(s);
4849
const openrouterChatModels = serverProviderModelCards('openrouter')(s);

0 commit comments

Comments (0)