@@ -52,13 +52,15 @@ import { DropdownItem, Model, UpgradeInfo } from '../types/Chat.ts';
 import packageJson from '../../package.json';
 import { isMac } from '../App.tsx';
 import CustomDropdown from './DropdownComponent.tsx';
-import { getTotalCost } from './ModelPrice.ts';
+import {
+  addBedrockPrefixToDeepseekModels,
+  getTotalCost,
+} from './ModelPrice.ts';
 import {
   BedrockThinkingModels,
-  DeepSeekModels,
+  DefaultTextModel,
   getAllRegions,
-  getDefaultTextModels,
-  GPTModels,
+  getDefaultApiKeyModels,
 } from '../storage/Constants.ts';
 import CustomTextInput from './CustomTextInput.tsx';
 import { requestAllOllamaModels } from '../api/ollama-api.ts';
@@ -138,23 +140,30 @@ function SettingsScreen(): React.JSX.Element {
       return;
     }
     saveOllamaApiURL(ollamaApiUrl);
-    if (ollamaApiUrl.length > 0) {
-      fetchAndSetModelNames().then();
-    }
+    fetchAndSetModelNames().then();
   }, [ollamaApiUrl]);
 
   useEffect(() => {
+    if (deepSeekApiKey === getDeepSeekApiKey()) {
+      return;
+    }
     saveDeepSeekApiKey(deepSeekApiKey);
+    fetchAndSetModelNames().then();
   }, [deepSeekApiKey]);
 
   useEffect(() => {
+    if (openAIApiKey === getOpenAIApiKey()) {
+      return;
+    }
     saveOpenAIApiKey(openAIApiKey);
+    fetchAndSetModelNames().then();
   }, [openAIApiKey]);
 
   const fetchAndSetModelNames = async () => {
     controllerRef.current = new AbortController();
     const ollamaModels = await requestAllOllamaModels();
     const response = await requestAllModels();
+    addBedrockPrefixToDeepseekModels(response.textModel);
     if (response.imageModel.length > 0) {
       setImageModels(response.imageModel);
       const imageModel = getImageModel();
@@ -170,13 +179,16 @@ function SettingsScreen(): React.JSX.Element {
       }
     }
     if (response.textModel.length === 0) {
-      response.textModel = [...getDefaultTextModels(), ...ollamaModels];
+      response.textModel = [
+        ...DefaultTextModel,
+        ...ollamaModels,
+        ...getDefaultApiKeyModels(),
+      ];
     } else {
       response.textModel = [
         ...response.textModel,
         ...ollamaModels,
-        ...DeepSeekModels,
-        ...GPTModels,
+        ...getDefaultApiKeyModels(),
       ];
     }
     setTextModels(response.textModel);
0 commit comments