mirror of
https://github.com/0xJacky/nginx-ui.git
synced 2025-05-12 19:05:55 +02:00
feat(llm): add models and providers constants for LLM settings
This commit is contained in:
parent
ed20dc6353
commit
4d82bb7ef8
2 changed files with 28 additions and 21 deletions
18
app/src/constants/llm.ts
Normal file
18
app/src/constants/llm.ts
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
export const LLM_MODELS = [
|
||||||
|
'o3-mini',
|
||||||
|
'o1',
|
||||||
|
'deepseek-reasoner',
|
||||||
|
'deepseek-chat',
|
||||||
|
'gpt-4o-mini',
|
||||||
|
'gpt-4o',
|
||||||
|
'gpt-4',
|
||||||
|
'gpt-4-32k',
|
||||||
|
'gpt-4-turbo',
|
||||||
|
'gpt-3.5-turbo',
|
||||||
|
]
|
||||||
|
|
||||||
|
export const LLM_PROVIDERS = [
|
||||||
|
'https://api.openai.com',
|
||||||
|
'https://api.deepseek.com',
|
||||||
|
'http://localhost:11434',
|
||||||
|
]
|
|
@ -1,29 +1,17 @@
|
||||||
<script setup lang="ts">
import type { Settings } from '@/api/settings'

import { LLM_MODELS, LLM_PROVIDERS } from '@/constants/llm'

// Settings object and per-field validation errors provided by an ancestor
// component via provide/inject. The non-null assertion / cast mirror the
// provider's contract — TODO(review): confirm the ancestor always provides both.
const data: Settings = inject('data')!
const errors: Record<string, Record<string, string>> = inject('errors') as Record<string, Record<string, string>>

// Shared constants mapped into the { value } shape expected by the
// auto-complete inputs' :options prop. Plain consts (not refs) are fine
// here: the source lists are static.
const models = LLM_MODELS.map(model => ({
  value: model,
}))

const providers = LLM_PROVIDERS.map(provider => ({
  value: provider,
}))
</script>
|
||||||
|
|
||||||
<template>
|
<template>
|
||||||
|
@ -48,9 +36,10 @@ const models = shallowRef([
|
||||||
: $gettext('To use a local large model, deploy it with ollama, vllm or lmdeploy. '
|
: $gettext('To use a local large model, deploy it with ollama, vllm or lmdeploy. '
|
||||||
+ 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
|
+ 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
|
||||||
>
|
>
|
||||||
<AInput
|
<AAutoComplete
|
||||||
v-model:value="data.openai.base_url"
|
v-model:value="data.openai.base_url"
|
||||||
:placeholder="$gettext('Leave blank for the default: https://api.openai.com/')"
|
:placeholder="$gettext('Leave blank for the default: https://api.openai.com/')"
|
||||||
|
:options="providers"
|
||||||
/>
|
/>
|
||||||
</AFormItem>
|
</AFormItem>
|
||||||
<AFormItem
|
<AFormItem
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue