
feat(llm): add models and providers constants for LLM settings

Jacky 2 months ago
parent
commit
4d82bb7ef8
2 changed files with 28 additions and 21 deletions
  1. +18 -0
      app/src/constants/llm.ts
  2. +10 -21
      app/src/views/preference/OpenAISettings.vue

+18 -0
app/src/constants/llm.ts

@@ -0,0 +1,18 @@
+export const LLM_MODELS = [
+  'o3-mini',
+  'o1',
+  'deepseek-reasoner',
+  'deepseek-chat',
+  'gpt-4o-mini',
+  'gpt-4o',
+  'gpt-4',
+  'gpt-4-32k',
+  'gpt-4-turbo',
+  'gpt-3.5-turbo',
+]
+
+export const LLM_PROVIDERS = [
+  'https://api.openai.com',
+  'https://api.deepseek.com',
+  'http://localhost:11434',
+]

+10 -21
app/src/views/preference/OpenAISettings.vue

@@ -1,29 +1,17 @@
 <script setup lang="ts">
 import type { Settings } from '@/api/settings'
+import { LLM_MODELS, LLM_PROVIDERS } from '@/constants/llm'
 
 const data: Settings = inject('data')!
 const errors: Record<string, Record<string, string>> = inject('errors') as Record<string, Record<string, string>>
 
-const models = shallowRef([
-  {
-    value: 'gpt-4o-mini',
-  },
-  {
-    value: 'gpt-4o',
-  },
-  {
-    value: 'gpt-4-1106-preview',
-  },
-  {
-    value: 'gpt-4',
-  },
-  {
-    value: 'gpt-4-32k',
-  },
-  {
-    value: 'gpt-3.5-turbo',
-  },
-])
+const models = LLM_MODELS.map(model => ({
+  value: model,
+}))
+
+const providers = LLM_PROVIDERS.map(provider => ({
+  value: provider,
+}))
 </script>
 
 <template>
@@ -48,9 +36,10 @@ const models = shallowRef([
         : $gettext('To use a local large model, deploy it with ollama, vllm or lmdeploy. '
           + 'They provide an OpenAI-compatible API endpoint, so just set the baseUrl to your local API.')"
     >
-      <AInput
+      <AAutoComplete
         v-model:value="data.openai.base_url"
         :placeholder="$gettext('Leave blank for the default: https://api.openai.com/')"
+        :options="providers"
       />
     </AFormItem>
     <AFormItem
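
Taken together, here is a minimal sketch of the refactored section of OpenAISettings.vue after this commit. The surrounding form items are abridged, and the model picker that consumes `models` sits outside the hunks shown above, so it is only hinted at in a comment; treat this as an illustration rather than the full file.

<script setup lang="ts">
import type { Settings } from '@/api/settings'
import { LLM_MODELS, LLM_PROVIDERS } from '@/constants/llm'

const data: Settings = inject('data')!

// Static option lists built from the shared constants; shallowRef is no
// longer needed because the arrays never change at runtime.
const models = LLM_MODELS.map(model => ({ value: model }))
const providers = LLM_PROVIDERS.map(provider => ({ value: provider }))
</script>

<template>
  <!-- The base URL field becomes an autocomplete seeded with known provider
       endpoints while still accepting any free-form URL (e.g. a local
       ollama/vllm instance such as http://localhost:11434). -->
  <AAutoComplete
    v-model:value="data.openai.base_url"
    :placeholder="$gettext('Leave blank for the default: https://api.openai.com/')"
    :options="providers"
  />
  <!-- `models` feeds the model field further down the form (outside the diff hunks shown above). -->
</template>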