diff --git a/app/src/language/ar/app.po b/app/src/language/ar/app.po
index 13d5ed20..336f017f 100644
--- a/app/src/language/ar/app.po
+++ b/app/src/language/ar/app.po
@@ -2505,11 +2505,11 @@ msgstr ""
 #: src/views/preference/OpenAISettings.vue:48
 #, fuzzy
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
-"لاستخدام نموذج كبير محلي، قم بنشره باستخدام vllm أو imdeploy. فهي توفر نقطة "
+"لاستخدام نموذج كبير محلي، قم بنشره باستخدام vllm أو lmdeploy. فهي توفر نقطة "
 "نهاية API متوافقة مع OpenAI، لذا قم فقط بتعيين baseUrl إلىAPI المحلية الخاصة "
 "بك."
 
diff --git a/app/src/language/de_DE/app.po b/app/src/language/de_DE/app.po
index a48a2526..2e26b0bc 100644
--- a/app/src/language/de_DE/app.po
+++ b/app/src/language/de_DE/app.po
@@ -2657,12 +2657,12 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
 "Um ein lokales großes Modell zu verwenden, implementiere es mit ollama, vllm "
-"oder imdeploy. Sie bieten einen OpenAI-kompatiblen API-Endpunkt, also setze "
+"oder lmdeploy. Sie bieten einen OpenAI-kompatiblen API-Endpunkt, also setze "
 "die baseUrl auf deine lokale API."
 
 #: src/views/preference/OpenAISettings.vue:72
diff --git a/app/src/language/en/app.po b/app/src/language/en/app.po
index 0be9c437..cc10376c 100644
--- a/app/src/language/en/app.po
+++ b/app/src/language/en/app.po
@@ -2598,7 +2598,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/language/es/app.po b/app/src/language/es/app.po
index beeaaf49..a0029bfb 100644
--- a/app/src/language/es/app.po
+++ b/app/src/language/es/app.po
@@ -2579,11 +2579,11 @@ msgstr ""
 #: src/views/preference/OpenAISettings.vue:48
 #, fuzzy
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
-"Para utilizar un modelo local grande, impleméntelo con vllm o imdeploy. "
+"Para utilizar un modelo local grande, impleméntelo con vllm o lmdeploy. "
 "Estos proporcionan un API endpoint compatible con OpenAI, por lo que solo "
 "debe configurar la baseUrl en su API local."
 
diff --git a/app/src/language/fr_FR/app.po b/app/src/language/fr_FR/app.po
index b22457a3..f4230131 100644
--- a/app/src/language/fr_FR/app.po
+++ b/app/src/language/fr_FR/app.po
@@ -2621,7 +2621,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/language/ko_KR/app.po b/app/src/language/ko_KR/app.po
index d9b4ae49..2b01fe25 100644
--- a/app/src/language/ko_KR/app.po
+++ b/app/src/language/ko_KR/app.po
@@ -2585,7 +2585,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/language/messages.pot b/app/src/language/messages.pot
index 08835f37..f6812235 100644
--- a/app/src/language/messages.pot
+++ b/app/src/language/messages.pot
@@ -2400,7 +2400,7 @@ msgid "To make sure the certification auto-renewal can work normally, we need to
 msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
-msgid "To use a local large model, deploy it with ollama, vllm or imdeploy. They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API."
+msgid "To use a local large model, deploy it with ollama, vllm or lmdeploy. They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API."
 msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:72
diff --git a/app/src/language/ru_RU/app.po b/app/src/language/ru_RU/app.po
index 33112fd4..a5f5c596 100644
--- a/app/src/language/ru_RU/app.po
+++ b/app/src/language/ru_RU/app.po
@@ -2565,7 +2565,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/language/tr_TR/app.po b/app/src/language/tr_TR/app.po
index c0c22946..1e54ab87 100644
--- a/app/src/language/tr_TR/app.po
+++ b/app/src/language/tr_TR/app.po
@@ -2779,11 +2779,11 @@ msgstr ""
 #: src/views/preference/OpenAISettings.vue:48
 #, fuzzy
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
-"Yerel bir büyük model kullanmak için, vllm veya imdeploy ile dağıtın. OpenAI "
+"Yerel bir büyük model kullanmak için, vllm veya lmdeploy ile dağıtın. OpenAI "
 "uyumlu bir API uç noktası sağlarlar, bu nedenle baseUrl'yi yerel API'nize "
 "ayarlamanız yeterlidir."
 
diff --git a/app/src/language/vi_VN/app.po b/app/src/language/vi_VN/app.po
index 3ae463f6..415ebfbd 100644
--- a/app/src/language/vi_VN/app.po
+++ b/app/src/language/vi_VN/app.po
@@ -2619,7 +2619,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/language/zh_CN/app.po b/app/src/language/zh_CN/app.po
index e8195330..dcf0a7d2 100644
--- a/app/src/language/zh_CN/app.po
+++ b/app/src/language/zh_CN/app.po
@@ -2453,11 +2453,11 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
-"要使用本地大型模型,可使用 ollama、vllm 或 imdeploy 进行部署。它们提供了与 "
+"要使用本地大型模型,可使用 ollama、vllm 或 lmdeploy 进行部署。它们提供了与 "
 "OpenAI 兼容的 API 端点,因此只需将 baseUrl 设置为本地 API 即可。"
 
 #: src/views/preference/OpenAISettings.vue:72
diff --git a/app/src/language/zh_TW/app.po b/app/src/language/zh_TW/app.po
index f4679cde..e0e82977 100644
--- a/app/src/language/zh_TW/app.po
+++ b/app/src/language/zh_TW/app.po
@@ -2504,7 +2504,7 @@ msgstr ""
 
 #: src/views/preference/OpenAISettings.vue:48
 msgid ""
-"To use a local large model, deploy it with ollama, vllm or imdeploy. They "
+"To use a local large model, deploy it with ollama, vllm or lmdeploy. They "
 "provide an OpenAI-compatible API endpoint, so just set the baseUrl to your "
 "local API."
 msgstr ""
diff --git a/app/src/views/preference/OpenAISettings.vue b/app/src/views/preference/OpenAISettings.vue
index e6cd3a49..4ec1b73a 100644
--- a/app/src/views/preference/OpenAISettings.vue
+++ b/app/src/views/preference/OpenAISettings.vue
@@ -45,7 +45,7 @@ const models = shallowRef([
       :validate-status="errors?.openai?.base_url ? 'error' : ''"
       :help="errors?.openai?.base_url === 'url' ? $gettext('The url is invalid.')
-        : $gettext('To use a local large model, deploy it with ollama, vllm or imdeploy. '
+        : $gettext('To use a local large model, deploy it with ollama, vllm or lmdeploy. '
           + 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
     >