feat(llm): add models and providers constants for LLM settings

This commit is contained in:
Jacky 2025-02-08 08:03:19 +00:00
parent ed20dc6353
commit 4d82bb7ef8
No known key found for this signature in database
GPG key ID: 215C21B10DF38B4D
2 changed files with 28 additions and 21 deletions

18
app/src/constants/llm.ts Normal file
View file

@@ -0,0 +1,18 @@
/**
 * Model identifiers offered in the LLM settings dropdown.
 *
 * `as const` keeps each entry as a literal type (instead of widening to
 * `string[]`) and makes the array readonly, so a union of valid model
 * names can be derived via `typeof LLM_MODELS[number]`.
 */
export const LLM_MODELS = [
  'o3-mini',
  'o1',
  'deepseek-reasoner',
  'deepseek-chat',
  'gpt-4o-mini',
  'gpt-4o',
  'gpt-4',
  'gpt-4-32k',
  'gpt-4-turbo',
  'gpt-3.5-turbo',
] as const
/**
 * Suggested OpenAI-compatible API base URLs for the provider autocomplete.
 * Includes a local endpoint (default ollama port 11434) for self-hosted models.
 *
 * `as const` preserves literal types and makes the list readonly.
 */
export const LLM_PROVIDERS = [
  'https://api.openai.com',
  'https://api.deepseek.com',
  'http://localhost:11434',
] as const

View file

@@ -1,29 +1,17 @@
<script setup lang="ts">
import type { Settings } from '@/api/settings'
import { LLM_MODELS, LLM_PROVIDERS } from '@/constants/llm'

// Settings object and validation errors are provided by the parent form.
// NOTE(review): the non-null assertion on 'data' assumes the provider always
// supplies it — confirm against the enclosing component.
const data: Settings = inject('data')!
const errors: Record<string, Record<string, string>> = inject('errors') as Record<string, Record<string, string>>

// Adapt the shared constant lists into the { value } option shape expected
// by the autocomplete components. (The previous hard-coded shallowRef model
// list was removed in favor of the shared LLM_MODELS constant; keeping both
// would redeclare 'models' and fail to compile.)
const models = LLM_MODELS.map(model => ({
  value: model,
}))
const providers = LLM_PROVIDERS.map(provider => ({
  value: provider,
}))
</script>
<template>
@@ -48,9 +36,10 @@ const models = shallowRef([
: $gettext('To use a local large model, deploy it with ollama, vllm or lmdeploy. '
+ 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
>
<AInput
<AAutoComplete
v-model:value="data.openai.base_url"
:placeholder="$gettext('Leave blank for the default: https://api.openai.com/')"
:options="providers"
/>
</AFormItem>
<AFormItem