diff --git a/app/src/constants/llm.ts b/app/src/constants/llm.ts
new file mode 100644
index 00000000..734288de
--- /dev/null
+++ b/app/src/constants/llm.ts
@@ -0,0 +1,18 @@
+export const LLM_MODELS = [
+ 'o3-mini',
+ 'o1',
+ 'deepseek-reasoner',
+ 'deepseek-chat',
+ 'gpt-4o-mini',
+ 'gpt-4o',
+ 'gpt-4',
+ 'gpt-4-32k',
+ 'gpt-4-turbo',
+ 'gpt-3.5-turbo',
+]
+
+export const LLM_PROVIDERS = [
+ 'https://api.openai.com',
+ 'https://api.deepseek.com',
+ 'http://localhost:11434',
+]
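For context, a hypothetical usage sketch (not part of this diff) of how these string arrays could feed a settings form's option lists; the `@/constants` path alias and the `{ label, value }` option shape are assumptions:

```ts
// Hypothetical sketch: map the plain string arrays into option objects
// suitable for a select/autocomplete component.
import { LLM_MODELS, LLM_PROVIDERS } from '@/constants/llm'

const modelOptions = LLM_MODELS.map(m => ({ label: m, value: m }))
const providerOptions = LLM_PROVIDERS.map(p => ({ label: p, value: p }))
```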
diff --git a/app/src/views/preference/OpenAISettings.vue b/app/src/views/preference/OpenAISettings.vue
index 4ec1b73a..6ce6af5d 100644
--- a/app/src/views/preference/OpenAISettings.vue
+++ b/app/src/views/preference/OpenAISettings.vue
@@ -1,29 +1,17 @@
@@ -48,9 +36,10 @@ const models = shallowRef([
: $gettext('To use a local large language model, deploy it with ollama, vllm, or lmdeploy. '
+ 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
>
-
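To illustrate the tooltip's advice, a minimal sketch of pointing an OpenAI-compatible client at a local ollama deployment; the `/v1` path suffix, the model name, and the placeholder API key are assumptions, not part of this PR:

```ts
// Minimal sketch: the official openai SDK talking to a local ollama
// instance via its OpenAI-compatible API. Names here are illustrative.
import OpenAI from 'openai'

const client = new OpenAI({
  baseURL: 'http://localhost:11434/v1', // ollama's OpenAI-compatible endpoint
  apiKey: 'ollama', // ollama ignores the key, but the SDK requires a value
})

const completion = await client.chat.completions.create({
  model: 'llama3', // whichever model has been pulled locally
  messages: [{ role: 'user', content: 'Hello' }],
})
console.log(completion.choices[0].message.content)
```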