Mirror of https://github.com/zadam/trilium.git, synced 2025-10-31 02:16:05 +01:00

feat(llm): change from using a precedence list to using a single specified provider for chat and/or embeddings
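Not part of the commit: a minimal sketch of the option change, for context. The comma-separated precedence options are replaced by single-valued selections, read here through the optionService.getOption helper that appears in the diff below; the option names and example values are taken from the changes themselves.

    // Before: a comma-separated precedence list, e.g. "openai,anthropic,ollama",
    // had to be split and walked to find a usable provider.
    const precedenceList = (optionService.getOption('aiProviderPrecedence') || '').split(',');

    // After: one provider per concern, e.g. "openai"; an empty value means "none selected".
    const chatProvider = optionService.getOption('aiSelectedProvider') || null;
    const embeddingProvider = optionService.getOption('embeddingSelectedProvider') || null;
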
		| @@ -65,7 +65,7 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|  | ||||
|         // Core AI options | ||||
|         this.setupChangeHandler('.ai-enabled', 'aiEnabled', true, true); | ||||
|         this.setupChangeHandler('.ai-provider-precedence', 'aiProviderPrecedence', true); | ||||
|         this.setupChangeHandler('.ai-selected-provider', 'aiSelectedProvider', true); | ||||
|         this.setupChangeHandler('.ai-temperature', 'aiTemperature'); | ||||
|         this.setupChangeHandler('.ai-system-prompt', 'aiSystemPrompt'); | ||||
|  | ||||
| @@ -132,11 +132,28 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|         this.setupChangeHandler('.enable-automatic-indexing', 'enableAutomaticIndexing', false, true); | ||||
|         this.setupChangeHandler('.embedding-similarity-threshold', 'embeddingSimilarityThreshold'); | ||||
|         this.setupChangeHandler('.max-notes-per-llm-query', 'maxNotesPerLlmQuery'); | ||||
|         this.setupChangeHandler('.embedding-provider-precedence', 'embeddingProviderPrecedence', true); | ||||
|         this.setupChangeHandler('.embedding-selected-provider', 'embeddingSelectedProvider', true); | ||||
|         this.setupChangeHandler('.embedding-dimension-strategy', 'embeddingDimensionStrategy'); | ||||
|         this.setupChangeHandler('.embedding-batch-size', 'embeddingBatchSize'); | ||||
|         this.setupChangeHandler('.embedding-update-interval', 'embeddingUpdateInterval'); | ||||
|  | ||||
|         // Add provider selection change handlers for dynamic settings visibility | ||||
|         this.$widget.find('.ai-selected-provider').on('change', () => { | ||||
|             const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string; | ||||
|             this.$widget.find('.provider-settings').hide(); | ||||
|             if (selectedProvider) { | ||||
|                 this.$widget.find(`.${selectedProvider}-provider-settings`).show(); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         this.$widget.find('.embedding-selected-provider').on('change', () => { | ||||
|             const selectedProvider = this.$widget.find('.embedding-selected-provider').val() as string; | ||||
|             this.$widget.find('.embedding-provider-settings').hide(); | ||||
|             if (selectedProvider) { | ||||
|                 this.$widget.find(`.${selectedProvider}-embedding-provider-settings`).show(); | ||||
|             } | ||||
|         }); | ||||
|  | ||||
|         // No sortable behavior needed anymore | ||||
|  | ||||
|         // Embedding stats refresh button | ||||
| @@ -194,42 +211,25 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         // Get provider precedence | ||||
|         const providerPrecedence = (this.$widget.find('.ai-provider-precedence').val() as string || '').split(','); | ||||
|         // Get selected provider | ||||
|         const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string; | ||||
|  | ||||
|         // Check for OpenAI configuration if it's in the precedence list | ||||
|         const openaiWarnings: string[] = []; | ||||
|         if (providerPrecedence.includes('openai')) { | ||||
|         // Check for selected provider configuration | ||||
|         const providerWarnings: string[] = []; | ||||
|         if (selectedProvider === 'openai') { | ||||
|             const openaiApiKey = this.$widget.find('.openai-api-key').val(); | ||||
|             if (!openaiApiKey) { | ||||
|                 openaiWarnings.push(t("ai_llm.empty_key_warning.openai")); | ||||
|                 providerWarnings.push(t("ai_llm.empty_key_warning.openai")); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Check for Anthropic configuration if it's in the precedence list | ||||
|         const anthropicWarnings: string[] = []; | ||||
|         if (providerPrecedence.includes('anthropic')) { | ||||
|         } else if (selectedProvider === 'anthropic') { | ||||
|             const anthropicApiKey = this.$widget.find('.anthropic-api-key').val(); | ||||
|             if (!anthropicApiKey) { | ||||
|                 anthropicWarnings.push(t("ai_llm.empty_key_warning.anthropic")); | ||||
|                 providerWarnings.push(t("ai_llm.empty_key_warning.anthropic")); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Check for Voyage configuration if it's in the precedence list | ||||
|         const voyageWarnings: string[] = []; | ||||
|         if (providerPrecedence.includes('voyage')) { | ||||
|             const voyageApiKey = this.$widget.find('.voyage-api-key').val(); | ||||
|             if (!voyageApiKey) { | ||||
|                 voyageWarnings.push(t("ai_llm.empty_key_warning.voyage")); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Check for Ollama configuration if it's in the precedence list | ||||
|         const ollamaWarnings: string[] = []; | ||||
|         if (providerPrecedence.includes('ollama')) { | ||||
|         } else if (selectedProvider === 'ollama') { | ||||
|             const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val(); | ||||
|             if (!ollamaBaseUrl) { | ||||
|                 ollamaWarnings.push(t("ai_llm.ollama_no_url")); | ||||
|                 providerWarnings.push(t("ai_llm.ollama_no_url")); | ||||
|             } | ||||
|         } | ||||
|  | ||||
| @@ -238,27 +238,24 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|         const embeddingsEnabled = this.$widget.find('.enable-automatic-indexing').prop('checked'); | ||||
|  | ||||
|         if (embeddingsEnabled) { | ||||
|             const embeddingProviderPrecedence = (this.$widget.find('.embedding-provider-precedence').val() as string || '').split(','); | ||||
|             const selectedEmbeddingProvider = this.$widget.find('.embedding-selected-provider').val() as string; | ||||
|  | ||||
|             if (embeddingProviderPrecedence.includes('openai') && !this.$widget.find('.openai-api-key').val()) { | ||||
|             if (selectedEmbeddingProvider === 'openai' && !this.$widget.find('.openai-api-key').val()) { | ||||
|                 embeddingWarnings.push(t("ai_llm.empty_key_warning.openai")); | ||||
|             } | ||||
|  | ||||
|             if (embeddingProviderPrecedence.includes('voyage') && !this.$widget.find('.voyage-api-key').val()) { | ||||
|             if (selectedEmbeddingProvider === 'voyage' && !this.$widget.find('.voyage-api-key').val()) { | ||||
|                 embeddingWarnings.push(t("ai_llm.empty_key_warning.voyage")); | ||||
|             } | ||||
|  | ||||
|             if (embeddingProviderPrecedence.includes('ollama') && !this.$widget.find('.ollama-base-url').val()) { | ||||
|             if (selectedEmbeddingProvider === 'ollama' && !this.$widget.find('.ollama-base-url').val()) { | ||||
|                 embeddingWarnings.push(t("ai_llm.empty_key_warning.ollama")); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Combine all warnings | ||||
|         const allWarnings = [ | ||||
|             ...openaiWarnings, | ||||
|             ...anthropicWarnings, | ||||
|             ...voyageWarnings, | ||||
|             ...ollamaWarnings, | ||||
|             ...providerWarnings, | ||||
|             ...embeddingWarnings | ||||
|         ]; | ||||
|  | ||||
| @@ -449,6 +446,27 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Update provider settings visibility based on selected providers | ||||
|      */ | ||||
|     updateProviderSettingsVisibility() { | ||||
|         if (!this.$widget) return; | ||||
|  | ||||
|         // Update AI provider settings visibility | ||||
|         const selectedAiProvider = this.$widget.find('.ai-selected-provider').val() as string; | ||||
|         this.$widget.find('.provider-settings').hide(); | ||||
|         if (selectedAiProvider) { | ||||
|             this.$widget.find(`.${selectedAiProvider}-provider-settings`).show(); | ||||
|         } | ||||
|  | ||||
|         // Update embedding provider settings visibility | ||||
|         const selectedEmbeddingProvider = this.$widget.find('.embedding-selected-provider').val() as string; | ||||
|         this.$widget.find('.embedding-provider-settings').hide(); | ||||
|         if (selectedEmbeddingProvider) { | ||||
|             this.$widget.find(`.${selectedEmbeddingProvider}-embedding-provider-settings`).show(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Called when the options have been loaded from the server | ||||
|      */ | ||||
| @@ -459,30 +477,30 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|         this.$widget.find('.ai-enabled').prop('checked', options.aiEnabled !== 'false'); | ||||
|         this.$widget.find('.ai-temperature').val(options.aiTemperature || '0.7'); | ||||
|         this.$widget.find('.ai-system-prompt').val(options.aiSystemPrompt || ''); | ||||
|         this.$widget.find('.ai-provider-precedence').val(options.aiProviderPrecedence || 'openai,anthropic,ollama'); | ||||
|         this.$widget.find('.ai-selected-provider').val(options.aiSelectedProvider || 'openai'); | ||||
|  | ||||
|         // OpenAI Section | ||||
|         this.$widget.find('.openai-api-key').val(options.openaiApiKey || ''); | ||||
|         this.$widget.find('.openai-base-url').val(options.openaiBaseUrl || 'https://api.openai_llm.com/v1'); | ||||
|         this.$widget.find('.openai-default-model').val(options.openaiDefaultModel || 'gpt-4o'); | ||||
|         this.$widget.find('.openai-embedding-model').val(options.openaiEmbeddingModel || 'text-embedding-3-small'); | ||||
|         this.$widget.find('.openai-base-url').val(options.openaiBaseUrl || 'https://api.openai.com/v1'); | ||||
|         this.$widget.find('.openai-default-model').val(options.openaiDefaultModel || ''); | ||||
|         this.$widget.find('.openai-embedding-model').val(options.openaiEmbeddingModel || ''); | ||||
|  | ||||
|         // Anthropic Section | ||||
|         this.$widget.find('.anthropic-api-key').val(options.anthropicApiKey || ''); | ||||
|         this.$widget.find('.anthropic-base-url').val(options.anthropicBaseUrl || 'https://api.anthropic.com'); | ||||
|         this.$widget.find('.anthropic-default-model').val(options.anthropicDefaultModel || 'claude-3-opus-20240229'); | ||||
|         this.$widget.find('.anthropic-default-model').val(options.anthropicDefaultModel || ''); | ||||
|  | ||||
|         // Voyage Section | ||||
|         this.$widget.find('.voyage-api-key').val(options.voyageApiKey || ''); | ||||
|         this.$widget.find('.voyage-embedding-model').val(options.voyageEmbeddingModel || 'voyage-2'); | ||||
|         this.$widget.find('.voyage-embedding-model').val(options.voyageEmbeddingModel || ''); | ||||
|  | ||||
|         // Ollama Section | ||||
|         this.$widget.find('.ollama-base-url').val(options.ollamaBaseUrl || 'http://localhost:11434'); | ||||
|         this.$widget.find('.ollama-default-model').val(options.ollamaDefaultModel || 'llama3'); | ||||
|         this.$widget.find('.ollama-embedding-model').val(options.ollamaEmbeddingModel || 'nomic-embed-text'); | ||||
|         this.$widget.find('.ollama-default-model').val(options.ollamaDefaultModel || ''); | ||||
|         this.$widget.find('.ollama-embedding-model').val(options.ollamaEmbeddingModel || ''); | ||||
|  | ||||
|         // Embedding Options | ||||
|         this.$widget.find('.embedding-provider-precedence').val(options.embeddingProviderPrecedence || 'openai,voyage,ollama,local'); | ||||
|         this.$widget.find('.embedding-selected-provider').val(options.embeddingSelectedProvider || 'openai'); | ||||
|         this.$widget.find('.embedding-auto-update-enabled').prop('checked', options.embeddingAutoUpdateEnabled !== 'false'); | ||||
|         this.$widget.find('.enable-automatic-indexing').prop('checked', options.enableAutomaticIndexing !== 'false'); | ||||
|         this.$widget.find('.embedding-similarity-threshold').val(options.embeddingSimilarityThreshold || '0.75'); | ||||
| @@ -491,6 +509,9 @@ export default class AiSettingsWidget extends OptionsWidget { | ||||
|         this.$widget.find('.embedding-batch-size').val(options.embeddingBatchSize || '10'); | ||||
|         this.$widget.find('.embedding-update-interval').val(options.embeddingUpdateInterval || '5000'); | ||||
|  | ||||
|         // Show/hide provider settings based on selected providers | ||||
|         this.updateProviderSettingsVisibility(); | ||||
|  | ||||
|         // Display validation warnings | ||||
|         this.displayValidationWarnings(); | ||||
|     } | ||||
|   | ||||
| @@ -61,36 +61,19 @@ export const TPL = ` | ||||
|     <h4>${t("ai_llm.provider_configuration")}</h4> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label>${t("ai_llm.provider_precedence")}</label> | ||||
|         <input type="text" class="ai-provider-precedence form-control" placeholder="openai,anthropic,ollama"> | ||||
|         <div class="form-text">${t("ai_llm.provider_precedence_description")}</div> | ||||
|         <label>${t("ai_llm.selected_provider")}</label> | ||||
|         <select class="ai-selected-provider form-control"> | ||||
|             <option value="">${t("ai_llm.select_provider")}</option> | ||||
|             <option value="openai">OpenAI</option> | ||||
|             <option value="anthropic">Anthropic</option> | ||||
|             <option value="ollama">Ollama</option> | ||||
|         </select> | ||||
|         <div class="form-text">${t("ai_llm.selected_provider_description")}</div> | ||||
|     </div> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label>${t("ai_llm.temperature")}</label> | ||||
|         <input class="ai-temperature form-control" type="number" min="0" max="2" step="0.1"> | ||||
|         <div class="form-text">${t("ai_llm.temperature_description")}</div> | ||||
|     </div> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label>${t("ai_llm.system_prompt")}</label> | ||||
|         <textarea class="ai-system-prompt form-control" rows="3"></textarea> | ||||
|         <div class="form-text">${t("ai_llm.system_prompt_description")}</div> | ||||
|     </div> | ||||
| </div> | ||||
|  | ||||
| <nav class="options-section-tabs"> | ||||
|     <div class="nav nav-tabs" id="nav-tab" role="tablist"> | ||||
|         <button class="nav-link active" id="nav-openai-tab" data-bs-toggle="tab" data-bs-target="#nav-openai" type="button" role="tab" aria-controls="nav-openai" aria-selected="true">${t("ai_llm.openai_tab")}</button> | ||||
|         <button class="nav-link" id="nav-anthropic-tab" data-bs-toggle="tab" data-bs-target="#nav-anthropic" type="button" role="tab" aria-controls="nav-anthropic" aria-selected="false">${t("ai_llm.anthropic_tab")}</button> | ||||
|         <button class="nav-link" id="nav-voyage-tab" data-bs-toggle="tab" data-bs-target="#nav-voyage" type="button" role="tab" aria-controls="nav-voyage" aria-selected="false">${t("ai_llm.voyage_tab")}</button> | ||||
|         <button class="nav-link" id="nav-ollama-tab" data-bs-toggle="tab" data-bs-target="#nav-ollama" type="button" role="tab" aria-controls="nav-ollama" aria-selected="false">${t("ai_llm.ollama_tab")}</button> | ||||
|     </div> | ||||
| </nav> | ||||
| <div class="options-section"> | ||||
|     <div class="tab-content" id="nav-tabContent"> | ||||
|         <div class="tab-pane fade show active" id="nav-openai" role="tabpanel" aria-labelledby="nav-openai-tab"> | ||||
|             <div class="card"> | ||||
|     <!-- OpenAI Provider Settings --> | ||||
|     <div class="provider-settings openai-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.openai_settings")}</h5> | ||||
|             </div> | ||||
| @@ -129,8 +112,10 @@ export const TPL = ` | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|         <div class="tab-pane fade" id="nav-anthropic" role="tabpanel" aria-labelledby="nav-anthropic-tab"> | ||||
|             <div class="card"> | ||||
|  | ||||
|     <!-- Anthropic Provider Settings --> | ||||
|     <div class="provider-settings anthropic-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.anthropic_settings")}</h5> | ||||
|             </div> | ||||
| @@ -160,32 +145,10 @@ export const TPL = ` | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|         <div class="tab-pane fade" id="nav-voyage" role="tabpanel" aria-labelledby="nav-voyage-tab"> | ||||
|             <div class="card"> | ||||
|                 <div class="card-header"> | ||||
|                     <h5>${t("ai_llm.voyage_settings")}</h5> | ||||
|                 </div> | ||||
|                 <div class="card-body"> | ||||
|                     <div class="form-group"> | ||||
|                         <label>${t("ai_llm.api_key")}</label> | ||||
|                         <input type="password" class="voyage-api-key form-control" autocomplete="off" /> | ||||
|                         <div class="form-text">${t("ai_llm.voyage_api_key_description")}</div> | ||||
|                     </div> | ||||
|  | ||||
|                     <div class="form-group"> | ||||
|                         <label>${t("ai_llm.embedding_model")}</label> | ||||
|                         <select class="voyage-embedding-model form-control"> | ||||
|                             <option value="voyage-2">Voyage-2 (recommended)</option> | ||||
|                             <option value="voyage-2-code">Voyage-2-Code</option> | ||||
|                             <option value="voyage-large-2">Voyage-Large-2</option> | ||||
|                         </select> | ||||
|                         <div class="form-text">${t("ai_llm.voyage_embedding_model_description")}</div> | ||||
|                     </div> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </div> | ||||
|         <div class="tab-pane fade" id="nav-ollama" role="tabpanel" aria-labelledby="nav-ollama-tab"> | ||||
|             <div class="card"> | ||||
|     <!-- Ollama Provider Settings --> | ||||
|     <div class="provider-settings ollama-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.ollama_settings")}</h5> | ||||
|             </div> | ||||
| @@ -218,16 +181,112 @@ export const TPL = ` | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label>${t("ai_llm.temperature")}</label> | ||||
|         <input class="ai-temperature form-control" type="number" min="0" max="2" step="0.1"> | ||||
|         <div class="form-text">${t("ai_llm.temperature_description")}</div> | ||||
|     </div> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label>${t("ai_llm.system_prompt")}</label> | ||||
|         <textarea class="ai-system-prompt form-control" rows="3"></textarea> | ||||
|         <div class="form-text">${t("ai_llm.system_prompt_description")}</div> | ||||
|     </div> | ||||
| </div> | ||||
|  | ||||
|  | ||||
| <div class="options-section"> | ||||
|     <h4>${t("ai_llm.embeddings_configuration")}</h4> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|         <label class="embedding-provider-label">${t("ai_llm.embedding_provider_precedence")}</label> | ||||
|         <input type="text" class="embedding-provider-precedence form-control" placeholder="openai,voyage,ollama,local"> | ||||
|         <div class="form-text">${t("ai_llm.embedding_provider_precedence_description")}</div> | ||||
|         <label class="embedding-provider-label">${t("ai_llm.selected_embedding_provider")}</label> | ||||
|         <select class="embedding-selected-provider form-control"> | ||||
|             <option value="">${t("ai_llm.select_embedding_provider")}</option> | ||||
|             <option value="openai">OpenAI</option> | ||||
|             <option value="voyage">Voyage AI</option> | ||||
|             <option value="ollama">Ollama</option> | ||||
|             <option value="local">Local</option> | ||||
|         </select> | ||||
|         <div class="form-text">${t("ai_llm.selected_embedding_provider_description")}</div> | ||||
|     </div> | ||||
|  | ||||
|     <!-- OpenAI Embedding Provider Settings --> | ||||
|     <div class="embedding-provider-settings openai-embedding-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.openai_embedding_settings")}</h5> | ||||
|             </div> | ||||
|             <div class="card-body"> | ||||
|                 <div class="form-group"> | ||||
|                     <label>${t("ai_llm.embedding_model")}</label> | ||||
|                     <select class="openai-embedding-model form-control"> | ||||
|                         <option value="text-embedding-3-small">text-embedding-3-small (recommended)</option> | ||||
|                         <option value="text-embedding-3-large">text-embedding-3-large</option> | ||||
|                     </select> | ||||
|                     <div class="form-text">${t("ai_llm.openai_embedding_model_description")}</div> | ||||
|                 </div> | ||||
|                 <div class="form-text text-muted">${t("ai_llm.openai_embedding_shared_settings")}</div> | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|  | ||||
|     <!-- Voyage Embedding Provider Settings --> | ||||
|     <div class="embedding-provider-settings voyage-embedding-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.voyage_settings")}</h5> | ||||
|             </div> | ||||
|             <div class="card-body"> | ||||
|                 <div class="form-group"> | ||||
|                     <label>${t("ai_llm.api_key")}</label> | ||||
|                     <input type="password" class="voyage-api-key form-control" autocomplete="off" /> | ||||
|                     <div class="form-text">${t("ai_llm.voyage_api_key_description")}</div> | ||||
|                 </div> | ||||
|  | ||||
|                 <div class="form-group"> | ||||
|                     <label>${t("ai_llm.embedding_model")}</label> | ||||
|                     <select class="voyage-embedding-model form-control"> | ||||
|                         <option value="voyage-2">Voyage-2 (recommended)</option> | ||||
|                         <option value="voyage-2-code">Voyage-2-Code</option> | ||||
|                         <option value="voyage-large-2">Voyage-Large-2</option> | ||||
|                     </select> | ||||
|                     <div class="form-text">${t("ai_llm.voyage_embedding_model_description")}</div> | ||||
|                 </div> | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|  | ||||
|     <!-- Ollama Embedding Provider Settings --> | ||||
|     <div class="embedding-provider-settings ollama-embedding-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.ollama_embedding_settings")}</h5> | ||||
|             </div> | ||||
|             <div class="card-body"> | ||||
|                 <div class="form-group"> | ||||
|                     <label>${t("ai_llm.embedding_model")}</label> | ||||
|                     <select class="ollama-embedding-model form-control"> | ||||
|                         <option value="nomic-embed-text">nomic-embed-text (recommended)</option> | ||||
|                         <option value="all-MiniLM-L6-v2">all-MiniLM-L6-v2</option> | ||||
|                     </select> | ||||
|                     <div class="form-text">${t("ai_llm.ollama_embedding_model_description")}</div> | ||||
|                 </div> | ||||
|                 <div class="form-text text-muted">${t("ai_llm.ollama_embedding_shared_settings")}</div> | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|  | ||||
|     <!-- Local Embedding Provider Settings --> | ||||
|     <div class="embedding-provider-settings local-embedding-provider-settings" style="display: none;"> | ||||
|         <div class="card mt-3"> | ||||
|             <div class="card-header"> | ||||
|                 <h5>${t("ai_llm.local_embedding_settings")}</h5> | ||||
|             </div> | ||||
|             <div class="card-body"> | ||||
|                 <div class="form-text">${t("ai_llm.local_embedding_description")}</div> | ||||
|             </div> | ||||
|         </div> | ||||
|     </div> | ||||
|  | ||||
|     <div class="form-group"> | ||||
|   | ||||
| @@ -95,7 +95,7 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([ | ||||
|     "aiEnabled", | ||||
|     "aiTemperature", | ||||
|     "aiSystemPrompt", | ||||
|     "aiProviderPrecedence", | ||||
|     "aiSelectedProvider", | ||||
|     "openaiApiKey", | ||||
|     "openaiBaseUrl", | ||||
|     "openaiDefaultModel", | ||||
| @@ -110,7 +110,7 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([ | ||||
|     "ollamaEmbeddingModel", | ||||
|     "embeddingAutoUpdateEnabled", | ||||
|     "embeddingDimensionStrategy", | ||||
|     "embeddingProviderPrecedence", | ||||
|     "embeddingSelectedProvider", | ||||
|     "embeddingSimilarityThreshold", | ||||
|     "embeddingBatchSize", | ||||
|     "embeddingUpdateInterval", | ||||
|   | ||||
| @@ -1,4 +1,5 @@ | ||||
| import options from '../options.js'; | ||||
| import eventService from '../events.js'; | ||||
| import type { AIService, ChatCompletionOptions, ChatResponse, Message } from './ai_interface.js'; | ||||
| import { AnthropicService } from './providers/anthropic_service.js'; | ||||
| import { ContextExtractor } from './context/index.js'; | ||||
| @@ -20,9 +21,8 @@ import type { NoteSearchResult } from './interfaces/context_interfaces.js'; | ||||
|  | ||||
| // Import new configuration system | ||||
| import { | ||||
|     getProviderPrecedence, | ||||
|     getPreferredProvider, | ||||
|     getEmbeddingProviderPrecedence, | ||||
|     getSelectedProvider, | ||||
|     getSelectedEmbeddingProvider, | ||||
|     parseModelIdentifier, | ||||
|     isAIEnabled, | ||||
|     getDefaultModelForProvider, | ||||
| @@ -60,6 +60,9 @@ export class AIServiceManager implements IAIServiceManager { | ||||
|         this.initializeTools().catch(error => { | ||||
|             log.error(`Error initializing LLM tools during AIServiceManager construction: ${error.message || String(error)}`); | ||||
|         }); | ||||
|  | ||||
|         // Set up event listener for provider changes | ||||
|         this.setupProviderChangeListener(); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
| @@ -84,16 +87,21 @@ export class AIServiceManager implements IAIServiceManager { | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Update the provider precedence order using the new configuration system | ||||
|      * Update the provider order using the new configuration system (single provider) | ||||
|      */ | ||||
|     async updateProviderOrderAsync(): Promise<void> { | ||||
|         try { | ||||
|             const providers = await getProviderPrecedence(); | ||||
|             this.providerOrder = providers as ServiceProviders[]; | ||||
|             const selectedProvider = await getSelectedProvider(); | ||||
|             if (selectedProvider) { | ||||
|                 this.providerOrder = [selectedProvider as ServiceProviders]; | ||||
|                 log.info(`Updated provider order: ${selectedProvider}`); | ||||
|             } else { | ||||
|                 this.providerOrder = []; | ||||
|                 log.info('No provider selected'); | ||||
|             } | ||||
|             this.initialized = true; | ||||
|             log.info(`Updated provider order: ${providers.join(', ')}`); | ||||
|         } catch (error) { | ||||
|             log.error(`Failed to get provider precedence: ${error}`); | ||||
|             log.error(`Failed to get selected provider: ${error}`); | ||||
|             // Keep empty order, will be handled gracefully by other methods | ||||
|             this.providerOrder = []; | ||||
|             this.initialized = true; | ||||
| @@ -521,13 +529,13 @@ export class AIServiceManager implements IAIServiceManager { | ||||
|      */ | ||||
|     async getPreferredProviderAsync(): Promise<string> { | ||||
|         try { | ||||
|             const preferredProvider = await getPreferredProvider(); | ||||
|             if (preferredProvider === null) { | ||||
|                 // No providers configured, fallback to first available | ||||
|                 log.info('No providers configured in precedence, using first available provider'); | ||||
|             const selectedProvider = await getSelectedProvider(); | ||||
|             if (selectedProvider === null) { | ||||
|                 // No provider selected, fallback to first available | ||||
|                 log.info('No provider selected, using first available provider'); | ||||
|                 return this.providerOrder[0]; | ||||
|             } | ||||
|             return preferredProvider; | ||||
|             return selectedProvider; | ||||
|         } catch (error) { | ||||
|             log.error(`Error getting preferred provider: ${error}`); | ||||
|             return this.providerOrder[0]; | ||||
| @@ -580,6 +588,7 @@ export class AIServiceManager implements IAIServiceManager { | ||||
|         }; | ||||
|     } | ||||
|  | ||||
|  | ||||
|     /** | ||||
|      * Error handler that properly types the error object | ||||
|      */ | ||||
| @@ -589,6 +598,75 @@ export class AIServiceManager implements IAIServiceManager { | ||||
|         } | ||||
|         return String(error); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Set up event listener for provider changes | ||||
|      */ | ||||
|     private setupProviderChangeListener(): void { | ||||
|         // List of AI-related options that should trigger service recreation | ||||
|         const aiRelatedOptions = [ | ||||
|             'aiSelectedProvider', | ||||
|             'embeddingSelectedProvider', | ||||
|             'openaiApiKey', | ||||
|             'openaiBaseUrl',  | ||||
|             'openaiDefaultModel', | ||||
|             'anthropicApiKey', | ||||
|             'anthropicBaseUrl', | ||||
|             'anthropicDefaultModel', | ||||
|             'ollamaBaseUrl', | ||||
|             'ollamaDefaultModel', | ||||
|             'voyageApiKey' | ||||
|         ]; | ||||
|  | ||||
|         eventService.subscribe(['entityChanged'], ({ entityName, entity }) => { | ||||
|             if (entityName === 'options' && entity && aiRelatedOptions.includes(entity.name)) { | ||||
|                 log.info(`AI-related option '${entity.name}' changed, recreating LLM services`); | ||||
|                 this.recreateServices(); | ||||
|             } | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Recreate LLM services when provider settings change | ||||
|      */ | ||||
|     private async recreateServices(): Promise<void> { | ||||
|         try { | ||||
|             log.info('Recreating LLM services due to configuration change'); | ||||
|  | ||||
|             // Clear configuration cache first | ||||
|             clearConfigurationCache(); | ||||
|  | ||||
|             // Recreate all service instances to pick up new configuration | ||||
|             this.recreateServiceInstances(); | ||||
|  | ||||
|             // Update provider order with new configuration | ||||
|             await this.updateProviderOrderAsync(); | ||||
|  | ||||
|             log.info('LLM services recreated successfully'); | ||||
|         } catch (error) { | ||||
|             log.error(`Error recreating LLM services: ${this.handleError(error)}`); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Recreate service instances to pick up new configuration | ||||
|      */ | ||||
|     private recreateServiceInstances(): void { | ||||
|         try { | ||||
|             log.info('Recreating service instances'); | ||||
|  | ||||
|             // Recreate service instances | ||||
|             this.services = { | ||||
|                 openai: new OpenAIService(), | ||||
|                 anthropic: new AnthropicService(), | ||||
|                 ollama: new OllamaService() | ||||
|             }; | ||||
|  | ||||
|             log.info('Service instances recreated successfully'); | ||||
|         } catch (error) { | ||||
|             log.error(`Error recreating service instances: ${this.handleError(error)}`); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| // Don't create singleton immediately, use a lazy-loading pattern | ||||
|   | ||||
| @@ -1,10 +1,9 @@ | ||||
| import configurationManager from './configuration_manager.js'; | ||||
| import optionService from '../../options.js'; | ||||
| import type { | ||||
|     ProviderType, | ||||
|     ModelIdentifier, | ||||
|     ModelConfig, | ||||
|     ProviderPrecedenceConfig, | ||||
|     EmbeddingProviderPrecedenceConfig | ||||
| } from '../interfaces/configuration_interfaces.js'; | ||||
|  | ||||
| /** | ||||
| @@ -13,41 +12,19 @@ import type { | ||||
|  */ | ||||
|  | ||||
| /** | ||||
|  * Get the ordered list of AI providers | ||||
|  * Get the selected AI provider | ||||
|  */ | ||||
| export async function getProviderPrecedence(): Promise<ProviderType[]> { | ||||
|     const config = await configurationManager.getProviderPrecedence(); | ||||
|     return config.providers; | ||||
| export async function getSelectedProvider(): Promise<ProviderType | null> { | ||||
|     const providerOption = optionService.getOption('aiSelectedProvider'); | ||||
|     return providerOption as ProviderType || null; | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Get the default/preferred AI provider | ||||
|  * Get the selected embedding provider | ||||
|  */ | ||||
| export async function getPreferredProvider(): Promise<ProviderType | null> { | ||||
|     const config = await configurationManager.getProviderPrecedence(); | ||||
|     if (config.providers.length === 0) { | ||||
|         return null; // No providers configured | ||||
|     } | ||||
|     return config.defaultProvider || config.providers[0]; | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Get the ordered list of embedding providers | ||||
|  */ | ||||
| export async function getEmbeddingProviderPrecedence(): Promise<string[]> { | ||||
|     const config = await configurationManager.getEmbeddingProviderPrecedence(); | ||||
|     return config.providers; | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Get the default embedding provider | ||||
|  */ | ||||
| export async function getPreferredEmbeddingProvider(): Promise<string | null> { | ||||
|     const config = await configurationManager.getEmbeddingProviderPrecedence(); | ||||
|     if (config.providers.length === 0) { | ||||
|         return null; // No providers configured | ||||
|     } | ||||
|     return config.defaultProvider || config.providers[0]; | ||||
| export async function getSelectedEmbeddingProvider(): Promise<string | null> { | ||||
|     const providerOption = optionService.getOption('embeddingSelectedProvider'); | ||||
|     return providerOption || null; | ||||
| } | ||||
|  | ||||
| /** | ||||
| @@ -107,22 +84,20 @@ export async function isProviderConfigured(provider: ProviderType): Promise<bool | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Get the first available (configured) provider from the precedence list | ||||
|  * Get the currently selected provider if it's available and configured | ||||
|  */ | ||||
| export async function getFirstAvailableProvider(): Promise<ProviderType | null> { | ||||
|     const providers = await getProviderPrecedence(); | ||||
| export async function getAvailableSelectedProvider(): Promise<ProviderType | null> { | ||||
|     const selectedProvider = await getSelectedProvider(); | ||||
|      | ||||
|     if (providers.length === 0) { | ||||
|         return null; // No providers configured | ||||
|     if (!selectedProvider) { | ||||
|         return null; // No provider selected | ||||
|     } | ||||
|  | ||||
|     for (const provider of providers) { | ||||
|         if (await isProviderConfigured(provider)) { | ||||
|             return provider; | ||||
|         } | ||||
|     if (await isProviderConfigured(selectedProvider)) { | ||||
|         return selectedProvider; | ||||
|     } | ||||
|  | ||||
|     return null; // No providers are properly configured | ||||
|     return null; // Selected provider is not properly configured | ||||
| } | ||||
|  | ||||
| /** | ||||
| @@ -163,17 +138,59 @@ export async function getValidModelConfig(provider: ProviderType): Promise<{ mod | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Get the first valid model configuration from the provider precedence list | ||||
|  * Get the model configuration for the currently selected provider | ||||
|  */ | ||||
| export async function getSelectedModelConfig(): Promise<{ model: string; provider: ProviderType } | null> { | ||||
|     const selectedProvider = await getSelectedProvider(); | ||||
|      | ||||
|     if (!selectedProvider) { | ||||
|         return null; // No provider selected | ||||
|     } | ||||
|  | ||||
|     return await getValidModelConfig(selectedProvider); | ||||
| } | ||||
|  | ||||
| // Legacy support functions - these maintain backwards compatibility but now use single provider logic | ||||
| /** | ||||
|  * @deprecated Use getSelectedProvider() instead | ||||
|  */ | ||||
| export async function getProviderPrecedence(): Promise<ProviderType[]> { | ||||
|     const selected = await getSelectedProvider(); | ||||
|     return selected ? [selected] : []; | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * @deprecated Use getSelectedProvider() instead   | ||||
|  */ | ||||
| export async function getPreferredProvider(): Promise<ProviderType | null> { | ||||
|     return await getSelectedProvider(); | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * @deprecated Use getSelectedEmbeddingProvider() instead | ||||
|  */ | ||||
| export async function getEmbeddingProviderPrecedence(): Promise<string[]> { | ||||
|     const selected = await getSelectedEmbeddingProvider(); | ||||
|     return selected ? [selected] : []; | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * @deprecated Use getSelectedEmbeddingProvider() instead | ||||
|  */ | ||||
| export async function getPreferredEmbeddingProvider(): Promise<string | null> { | ||||
|     return await getSelectedEmbeddingProvider(); | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * @deprecated Use getAvailableSelectedProvider() instead | ||||
|  */ | ||||
| export async function getFirstAvailableProvider(): Promise<ProviderType | null> { | ||||
|     return await getAvailableSelectedProvider(); | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * @deprecated Use getSelectedModelConfig() instead | ||||
|  */ | ||||
| export async function getFirstValidModelConfig(): Promise<{ model: string; provider: ProviderType } | null> { | ||||
|     const providers = await getProviderPrecedence(); | ||||
|  | ||||
|     for (const provider of providers) { | ||||
|         const config = await getValidModelConfig(provider); | ||||
|         if (config) { | ||||
|             return config; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     return null; // No valid model configuration found | ||||
|     return await getSelectedModelConfig(); | ||||
| } | ||||
| @@ -50,8 +50,8 @@ export class ConfigurationManager { | ||||
|         try { | ||||
|             const config: AIConfig = { | ||||
|                 enabled: await this.getAIEnabled(), | ||||
|                 providerPrecedence: await this.getProviderPrecedence(), | ||||
|                 embeddingProviderPrecedence: await this.getEmbeddingProviderPrecedence(), | ||||
|                 selectedProvider: await this.getSelectedProvider(), | ||||
|                 selectedEmbeddingProvider: await this.getSelectedEmbeddingProvider(), | ||||
|                 defaultModels: await this.getDefaultModels(), | ||||
|                 providerSettings: await this.getProviderSettings() | ||||
|             }; | ||||
| @@ -66,46 +66,28 @@ export class ConfigurationManager { | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Parse provider precedence from string option | ||||
|      * Get the selected AI provider | ||||
|      */ | ||||
|     public async getProviderPrecedence(): Promise<ProviderPrecedenceConfig> { | ||||
|     public async getSelectedProvider(): Promise<ProviderType | null> { | ||||
|         try { | ||||
|             const precedenceOption = await options.getOption('aiProviderPrecedence'); | ||||
|             const providers = this.parseProviderList(precedenceOption); | ||||
|  | ||||
|             return { | ||||
|                 providers: providers as ProviderType[], | ||||
|                 defaultProvider: providers.length > 0 ? providers[0] as ProviderType : undefined | ||||
|             }; | ||||
|             const selectedProvider = await options.getOption('aiSelectedProvider'); | ||||
|             return selectedProvider as ProviderType || null; | ||||
|         } catch (error) { | ||||
|             log.error(`Error parsing provider precedence: ${error}`); | ||||
|             // Only return known providers if they exist, don't assume defaults | ||||
|             return { | ||||
|                 providers: [], | ||||
|                 defaultProvider: undefined | ||||
|             }; | ||||
|             log.error(`Error getting selected provider: ${error}`); | ||||
|             return null; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Parse embedding provider precedence from string option | ||||
|      * Get the selected embedding provider | ||||
|      */ | ||||
|     public async getEmbeddingProviderPrecedence(): Promise<EmbeddingProviderPrecedenceConfig> { | ||||
|     public async getSelectedEmbeddingProvider(): Promise<EmbeddingProviderType | null> { | ||||
|         try { | ||||
|             const precedenceOption = await options.getOption('embeddingProviderPrecedence'); | ||||
|             const providers = this.parseProviderList(precedenceOption); | ||||
|  | ||||
|             return { | ||||
|                 providers: providers as EmbeddingProviderType[], | ||||
|                 defaultProvider: providers.length > 0 ? providers[0] as EmbeddingProviderType : undefined | ||||
|             }; | ||||
|             const selectedProvider = await options.getOption('embeddingSelectedProvider'); | ||||
|             return selectedProvider as EmbeddingProviderType || null; | ||||
|         } catch (error) { | ||||
|             log.error(`Error parsing embedding provider precedence: ${error}`); | ||||
|             // Don't assume defaults, return empty configuration | ||||
|             return { | ||||
|                 providers: [], | ||||
|                 defaultProvider: undefined | ||||
|             }; | ||||
|             log.error(`Error getting selected embedding provider: ${error}`); | ||||
|             return null; | ||||
|         } | ||||
|     } | ||||
|  | ||||
| @@ -265,31 +247,29 @@ export class ConfigurationManager { | ||||
|                 return result; | ||||
|             } | ||||
|  | ||||
|             // Validate provider precedence | ||||
|             if (config.providerPrecedence.providers.length === 0) { | ||||
|                 result.errors.push('No providers configured in precedence list'); | ||||
|             // Validate selected provider | ||||
|             if (!config.selectedProvider) { | ||||
|                 result.errors.push('No AI provider selected'); | ||||
|                 result.isValid = false; | ||||
|             } | ||||
|             } else { | ||||
|                 // Validate selected provider settings | ||||
|                 const providerConfig = config.providerSettings[config.selectedProvider]; | ||||
|  | ||||
|             // Validate provider settings | ||||
|             for (const provider of config.providerPrecedence.providers) { | ||||
|                 const providerConfig = config.providerSettings[provider]; | ||||
|  | ||||
|                 if (provider === 'openai') { | ||||
|                 if (config.selectedProvider === 'openai') { | ||||
|                     const openaiConfig = providerConfig as OpenAISettings | undefined; | ||||
|                     if (!openaiConfig?.apiKey) { | ||||
|                         result.warnings.push('OpenAI API key is not configured'); | ||||
|                     } | ||||
|                 } | ||||
|  | ||||
|                 if (provider === 'anthropic') { | ||||
|                 if (config.selectedProvider === 'anthropic') { | ||||
|                     const anthropicConfig = providerConfig as AnthropicSettings | undefined; | ||||
|                     if (!anthropicConfig?.apiKey) { | ||||
|                         result.warnings.push('Anthropic API key is not configured'); | ||||
|                     } | ||||
|                 } | ||||
|  | ||||
|                 if (provider === 'ollama') { | ||||
|                 if (config.selectedProvider === 'ollama') { | ||||
|                     const ollamaConfig = providerConfig as OllamaSettings | undefined; | ||||
|                     if (!ollamaConfig?.baseUrl) { | ||||
|                         result.warnings.push('Ollama base URL is not configured'); | ||||
| @@ -297,6 +277,11 @@ export class ConfigurationManager { | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             // Validate selected embedding provider | ||||
|             if (!config.selectedEmbeddingProvider) { | ||||
|                 result.warnings.push('No embedding provider selected'); | ||||
|             } | ||||
|  | ||||
|         } catch (error) { | ||||
|             result.errors.push(`Configuration validation error: ${error}`); | ||||
|             result.isValid = false; | ||||
| @@ -356,14 +341,8 @@ export class ConfigurationManager { | ||||
|     private getDefaultConfig(): AIConfig { | ||||
|         return { | ||||
|             enabled: false, | ||||
|             providerPrecedence: { | ||||
|                 providers: [], | ||||
|                 defaultProvider: undefined | ||||
|             }, | ||||
|             embeddingProviderPrecedence: { | ||||
|                 providers: [], | ||||
|                 defaultProvider: undefined | ||||
|             }, | ||||
|             selectedProvider: null, | ||||
|             selectedEmbeddingProvider: null, | ||||
|             defaultModels: { | ||||
|                 openai: undefined, | ||||
|                 anthropic: undefined, | ||||
|   | ||||
| @@ -1,51 +1,32 @@ | ||||
| import options from '../../../options.js'; | ||||
| import log from '../../../log.js'; | ||||
| import { getEmbeddingProvider, getEnabledEmbeddingProviders } from '../../providers/providers.js'; | ||||
| import { getSelectedEmbeddingProvider } from '../../config/configuration_helpers.js'; | ||||
|  | ||||
| /** | ||||
|  * Manages embedding providers for context services | ||||
|  */ | ||||
| export class ProviderManager { | ||||
|     /** | ||||
|      * Get the preferred embedding provider based on user settings | ||||
|      * Tries to use the most appropriate provider in this order: | ||||
|      * 1. User's configured default provider | ||||
|      * 2. OpenAI if API key is set | ||||
|      * 3. Anthropic if API key is set | ||||
|      * 4. Ollama if configured | ||||
|      * 5. Any available provider | ||||
|      * 6. Local provider as fallback | ||||
|      * Get the selected embedding provider based on user settings | ||||
|      * Uses the single provider selection approach | ||||
|      * | ||||
|      * @returns The preferred embedding provider or null if none available | ||||
|      * @returns The selected embedding provider or null if none available | ||||
|      */ | ||||
|     async getPreferredEmbeddingProvider(): Promise<any> { | ||||
|         try { | ||||
|             // Try to get providers based on precedence list | ||||
|             const precedenceOption = await options.getOption('embeddingProviderPrecedence'); | ||||
|             let precedenceList: string[] = []; | ||||
|             // Get the selected embedding provider | ||||
|             const selectedProvider = await getSelectedEmbeddingProvider(); | ||||
|              | ||||
|             if (precedenceOption) { | ||||
|                 if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) { | ||||
|                     precedenceList = JSON.parse(precedenceOption); | ||||
|                 } else if (typeof precedenceOption === 'string') { | ||||
|                     if (precedenceOption.includes(',')) { | ||||
|                         precedenceList = precedenceOption.split(',').map(p => p.trim()); | ||||
|                     } else { | ||||
|                         precedenceList = [precedenceOption]; | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             // Try each provider in the precedence list | ||||
|             for (const providerId of precedenceList) { | ||||
|                 const provider = await getEmbeddingProvider(providerId); | ||||
|             if (selectedProvider) { | ||||
|                 const provider = await getEmbeddingProvider(selectedProvider); | ||||
|                 if (provider) { | ||||
|                     log.info(`Using embedding provider from precedence list: ${providerId}`); | ||||
|                     log.info(`Using selected embedding provider: ${selectedProvider}`); | ||||
|                     return provider; | ||||
|                 } | ||||
|                 log.info(`Selected embedding provider ${selectedProvider} is not available`); | ||||
|             } | ||||
|  | ||||
|             // If no provider from precedence list is available, try any enabled provider | ||||
|             // If no provider is selected or available, try any enabled provider | ||||
|             const providers = await getEnabledEmbeddingProviders(); | ||||
|             if (providers.length > 0) { | ||||
|                 log.info(`Using available embedding provider: ${providers[0].name}`); | ||||
|   | ||||
| @@ -497,40 +497,24 @@ export class IndexService { | ||||
|                 throw new Error("No embedding providers available"); | ||||
|             } | ||||
|  | ||||
|             // Get the embedding provider precedence | ||||
|             // Get the selected embedding provider | ||||
|             const options = (await import('../options.js')).default; | ||||
|             let preferredProviders: string[] = []; | ||||
|  | ||||
|             const embeddingPrecedence = await options.getOption('embeddingProviderPrecedence'); | ||||
|             const selectedEmbeddingProvider = await options.getOption('embeddingSelectedProvider'); | ||||
|             let provider; | ||||
|  | ||||
|             if (embeddingPrecedence) { | ||||
|                 // Parse the precedence string | ||||
|                 if (embeddingPrecedence.startsWith('[') && embeddingPrecedence.endsWith(']')) { | ||||
|                     preferredProviders = JSON.parse(embeddingPrecedence); | ||||
|                 } else if (typeof embeddingPrecedence === 'string') { | ||||
|                     if (embeddingPrecedence.includes(',')) { | ||||
|                         preferredProviders = embeddingPrecedence.split(',').map(p => p.trim()); | ||||
|                     } else { | ||||
|                         preferredProviders = [embeddingPrecedence]; | ||||
|                     } | ||||
|                 } | ||||
|             if (selectedEmbeddingProvider) { | ||||
|                 // Try to use the selected provider | ||||
|                 const enabledProviders = await providerManager.getEnabledEmbeddingProviders(); | ||||
|                 provider = enabledProviders.find(p => p.name === selectedEmbeddingProvider); | ||||
|                  | ||||
|                 // Find first enabled provider by precedence order | ||||
|                 for (const providerName of preferredProviders) { | ||||
|                     const matchedProvider = providers.find(p => p.name === providerName); | ||||
|                     if (matchedProvider) { | ||||
|                         provider = matchedProvider; | ||||
|                         break; | ||||
|                     } | ||||
|                 } | ||||
|  | ||||
|                 // If no match found, use first available | ||||
|                 if (!provider && providers.length > 0) { | ||||
|                 if (!provider) { | ||||
|                     log.info(`Selected embedding provider ${selectedEmbeddingProvider} is not available, using first enabled provider`); | ||||
|                     // Fall back to first enabled provider | ||||
|                     provider = providers[0]; | ||||
|                 } | ||||
|             } else { | ||||
|                 // Default to first available provider | ||||
|                 // No provider selected, use first available provider | ||||
|                 log.info('No embedding provider selected, using first available provider'); | ||||
|                 provider = providers[0]; | ||||
|             } | ||||
|  | ||||
|   | ||||
| @@ -46,8 +46,8 @@ export interface ModelCapabilities { | ||||
|  */ | ||||
| export interface AIConfig { | ||||
|     enabled: boolean; | ||||
|     providerPrecedence: ProviderPrecedenceConfig; | ||||
|     embeddingProviderPrecedence: EmbeddingProviderPrecedenceConfig; | ||||
|     selectedProvider: ProviderType | null; | ||||
|     selectedEmbeddingProvider: EmbeddingProviderType | null; | ||||
|     defaultModels: Record<ProviderType, string | undefined>; | ||||
|     providerSettings: ProviderSettings; | ||||
| } | ||||
| @@ -87,7 +87,7 @@ export type ProviderType = 'openai' | 'anthropic' | 'ollama'; | ||||
| /** | ||||
|  * Valid embedding provider types | ||||
|  */ | ||||
| export type EmbeddingProviderType = 'openai' | 'ollama' | 'local'; | ||||
| export type EmbeddingProviderType = 'openai' | 'voyage' | 'ollama' | 'local'; | ||||
|  | ||||
| /** | ||||
|  * Model identifier with provider prefix (e.g., "openai:gpt-4" or "ollama:llama2") | ||||
|   | ||||
| @@ -11,8 +11,7 @@ import type { ServiceProviders } from '../../interfaces/ai_service_interfaces.js | ||||
|  | ||||
| // Import new configuration system | ||||
| import { | ||||
|     getProviderPrecedence, | ||||
|     getPreferredProvider, | ||||
|     getSelectedProvider, | ||||
|     parseModelIdentifier, | ||||
|     getDefaultModelForProvider, | ||||
|     createModelConfig | ||||
| @@ -99,22 +98,30 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput, | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Get default provider and model using the new configuration system | ||||
|         // Get selected provider and model using the new configuration system | ||||
|         try { | ||||
|             // Use the new configuration helpers - no string parsing! | ||||
|             const preferredProvider = await getPreferredProvider(); | ||||
|             const selectedProvider = await getSelectedProvider(); | ||||
|  | ||||
|             if (!preferredProvider) { | ||||
|                 throw new Error('No AI providers are configured. Please check your AI settings.'); | ||||
|             if (!selectedProvider) { | ||||
|                 throw new Error('No AI provider is selected. Please select a provider in your AI settings.'); | ||||
|             } | ||||
|  | ||||
|             const modelName = await getDefaultModelForProvider(preferredProvider); | ||||
|             const modelName = await getDefaultModelForProvider(selectedProvider); | ||||
|  | ||||
|             if (!modelName) { | ||||
|                 throw new Error(`No default model configured for provider ${preferredProvider}. Please set a default model in your AI settings.`); | ||||
|                 // Try to fetch and set a default model from the provider | ||||
|                 const fetchedModel = await this.fetchAndSetDefaultModel(selectedProvider); | ||||
|                 if (!fetchedModel) { | ||||
|                     throw new Error(`No default model configured for provider ${selectedProvider}. Please set a default model in your AI settings.`); | ||||
|                 } | ||||
|                 // Use the fetched model | ||||
|                 updatedOptions.model = fetchedModel; | ||||
|             } else { | ||||
|                 updatedOptions.model = modelName; | ||||
|             } | ||||
|  | ||||
|             log.info(`Selected provider: ${preferredProvider}, model: ${modelName}`); | ||||
|             log.info(`Selected provider: ${selectedProvider}, model: ${updatedOptions.model}`); | ||||
|  | ||||
|             // Determine query complexity | ||||
|             let queryComplexity = 'low'; | ||||
| @@ -142,15 +149,14 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput, | ||||
|                 queryComplexity = contentLength > SEARCH_CONSTANTS.CONTEXT.CONTENT_LENGTH.HIGH_THRESHOLD ? 'high' : 'medium'; | ||||
|             } | ||||
|  | ||||
|             // Set the model and add provider metadata | ||||
|             updatedOptions.model = modelName; | ||||
|             this.addProviderMetadata(updatedOptions, preferredProvider as ServiceProviders, modelName); | ||||
|             // Add provider metadata (model is already set above) | ||||
|             this.addProviderMetadata(updatedOptions, selectedProvider as ServiceProviders, updatedOptions.model); | ||||
|  | ||||
|             log.info(`Selected model: ${modelName} from provider: ${preferredProvider} for query complexity: ${queryComplexity}`); | ||||
|             log.info(`Selected model: ${updatedOptions.model} from provider: ${selectedProvider} for query complexity: ${queryComplexity}`); | ||||
|             log.info(`[ModelSelectionStage] Final options: ${JSON.stringify({ | ||||
|                 model: updatedOptions.model, | ||||
|                 stream: updatedOptions.stream, | ||||
|                 provider: preferredProvider, | ||||
|                 provider: selectedProvider, | ||||
|                 enableTools: updatedOptions.enableTools | ||||
|             })}`); | ||||
|  | ||||
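The selection flow above reduces to: require a selected provider, use its configured default model, and only fetch and persist a model from the provider when no default is configured. A condensed sketch of that control flow, with the helpers passed in so the example stays self-contained; the real code uses the imported configuration helpers and the private fetchAndSetDefaultModel method shown later in this diff:

    // Condensed restatement of the flow above; not the actual class code.
    async function resolveModel(deps: {
        getSelectedProvider: () => Promise<string | null>;
        getDefaultModelForProvider: (provider: string) => Promise<string | null>;
        fetchAndSetDefaultModel: (provider: string) => Promise<string | null>;
    }): Promise<{ provider: string; model: string }> {
        const provider = await deps.getSelectedProvider();
        if (!provider) {
            throw new Error('No AI provider is selected. Please select a provider in your AI settings.');
        }

        let model = await deps.getDefaultModelForProvider(provider);
        if (!model) {
            // No configured default: fetch the provider's model list and persist the first entry
            model = await deps.fetchAndSetDefaultModel(provider);
        }
        if (!model) {
            throw new Error(`No default model configured for provider ${provider}.`);
        }

        return { provider, model };
    }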
| @@ -210,38 +216,38 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput, | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Determine model based on provider precedence using the new configuration system | ||||
|      * Determine model based on selected provider using the new configuration system | ||||
|      */ | ||||
|     private async determineDefaultModel(input: ModelSelectionInput): Promise<string> { | ||||
|         try { | ||||
|             // Use the new configuration system | ||||
|             const providers = await getProviderPrecedence(); | ||||
|             // Use the new single provider configuration system | ||||
|             const selectedProvider = await getSelectedProvider(); | ||||
|  | ||||
|             // Use only providers that are available | ||||
|             const availableProviders = providers.filter(provider => | ||||
|                 aiServiceManager.isProviderAvailable(provider)); | ||||
|  | ||||
|             if (availableProviders.length === 0) { | ||||
|                 throw new Error('No AI providers are available'); | ||||
|             if (!selectedProvider) { | ||||
|                 throw new Error('No AI provider is selected. Please select a provider in your AI settings.'); | ||||
|             } | ||||
|  | ||||
|             // Get the first available provider and its default model | ||||
|             const defaultProvider = availableProviders[0]; | ||||
|             const defaultModel = await getDefaultModelForProvider(defaultProvider); | ||||
|             // Check if the provider is available | ||||
|             if (!aiServiceManager.isProviderAvailable(selectedProvider)) { | ||||
|                 throw new Error(`Selected provider ${selectedProvider} is not available`); | ||||
|             } | ||||
|  | ||||
|             // Get the default model for the selected provider | ||||
|             const defaultModel = await getDefaultModelForProvider(selectedProvider); | ||||
|  | ||||
|             if (!defaultModel) { | ||||
|                 throw new Error(`No default model configured for provider ${defaultProvider}. Please configure a default model in your AI settings.`); | ||||
|                 throw new Error(`No default model configured for provider ${selectedProvider}. Please configure a default model in your AI settings.`); | ||||
|             } | ||||
|  | ||||
|             // Set provider metadata | ||||
|             if (!input.options.providerMetadata) { | ||||
|                 input.options.providerMetadata = { | ||||
|                     provider: defaultProvider as 'openai' | 'anthropic' | 'ollama' | 'local', | ||||
|                     provider: selectedProvider as 'openai' | 'anthropic' | 'ollama' | 'local', | ||||
|                     modelId: defaultModel | ||||
|                 }; | ||||
|             } | ||||
|  | ||||
|             log.info(`Selected default model ${defaultModel} from provider ${defaultProvider}`); | ||||
|             log.info(`Selected default model ${defaultModel} from provider ${selectedProvider}`); | ||||
|             return defaultModel; | ||||
|         } catch (error) { | ||||
|             log.error(`Error determining default model: ${error}`); | ||||
| @@ -271,4 +277,126 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput, | ||||
|             return MODEL_CAPABILITIES['default'].contextWindowTokens; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Fetch available models from provider and set a default model | ||||
|      */ | ||||
|     private async fetchAndSetDefaultModel(provider: ProviderType): Promise<string | null> { | ||||
|         try { | ||||
|             log.info(`Fetching available models for provider ${provider}`); | ||||
|              | ||||
|             // Import server-side options to update the default model | ||||
|             const optionService = (await import('../../../options.js')).default; | ||||
|              | ||||
|             switch (provider) { | ||||
|                 case 'openai': { | ||||
|                     const openaiModels = await this.fetchOpenAIModels(); | ||||
|                     if (openaiModels.length > 0) { | ||||
|                         // Use the first available model without any preferences | ||||
|                         const selectedModel = openaiModels[0]; | ||||
|  | ||||
|                         await optionService.setOption('openaiDefaultModel', selectedModel); | ||||
|                         log.info(`Set default OpenAI model to: ${selectedModel}`); | ||||
|                         return selectedModel; | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|  | ||||
|                 case 'anthropic': { | ||||
|                     const anthropicModels = await this.fetchAnthropicModels(); | ||||
|                     if (anthropicModels.length > 0) { | ||||
|                         // Use the first available model without any preferences | ||||
|                         const selectedModel = anthropicModels[0]; | ||||
|  | ||||
|                         await optionService.setOption('anthropicDefaultModel', selectedModel); | ||||
|                         log.info(`Set default Anthropic model to: ${selectedModel}`); | ||||
|                         return selectedModel; | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|  | ||||
|                 case 'ollama': { | ||||
|                     const ollamaModels = await this.fetchOllamaModels(); | ||||
|                     if (ollamaModels.length > 0) { | ||||
|                         // Use the first available model without any preferences | ||||
|                         const selectedModel = ollamaModels[0]; | ||||
|  | ||||
|                         await optionService.setOption('ollamaDefaultModel', selectedModel); | ||||
|                         log.info(`Set default Ollama model to: ${selectedModel}`); | ||||
|                         return selectedModel; | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|             } | ||||
|              | ||||
|             log.info(`No models available for provider ${provider}`); | ||||
|             return null; | ||||
|         } catch (error) { | ||||
|             log.error(`Error fetching models for provider ${provider}: ${error}`); | ||||
|             return null; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Fetch available OpenAI models | ||||
|      */ | ||||
|     private async fetchOpenAIModels(): Promise<string[]> { | ||||
|         try { | ||||
|             // Use the provider service to get available models | ||||
|             const aiServiceManager = (await import('../../ai_service_manager.js')).default; | ||||
|             const service = aiServiceManager.getInstance().getService('openai'); | ||||
|              | ||||
|             if (service && typeof (service as any).getAvailableModels === 'function') { | ||||
|                 return await (service as any).getAvailableModels(); | ||||
|             } | ||||
|              | ||||
|             // No fallback - return empty array if models can't be fetched | ||||
|             log.info('OpenAI service does not support getAvailableModels method'); | ||||
|             return []; | ||||
|         } catch (error) { | ||||
|             log.error(`Error fetching OpenAI models: ${error}`); | ||||
|             return []; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Fetch available Anthropic models | ||||
|      */ | ||||
|     private async fetchAnthropicModels(): Promise<string[]> { | ||||
|         try { | ||||
|             // Use the provider service to get available models | ||||
|             const aiServiceManager = (await import('../../ai_service_manager.js')).default; | ||||
|             const service = aiServiceManager.getInstance().getService('anthropic'); | ||||
|              | ||||
|             if (service && typeof (service as any).getAvailableModels === 'function') { | ||||
|                 return await (service as any).getAvailableModels(); | ||||
|             } | ||||
|              | ||||
|             // No fallback - return empty array if models can't be fetched | ||||
|             log.info('Anthropic service does not support getAvailableModels method'); | ||||
|             return []; | ||||
|         } catch (error) { | ||||
|             log.error(`Error fetching Anthropic models: ${error}`); | ||||
|             return []; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Fetch available Ollama models | ||||
|      */ | ||||
|     private async fetchOllamaModels(): Promise<string[]> { | ||||
|         try { | ||||
|             // Use the provider service to get available models | ||||
|             const aiServiceManager = (await import('../../ai_service_manager.js')).default; | ||||
|             const service = aiServiceManager.getInstance().getService('ollama'); | ||||
|              | ||||
|             if (service && typeof (service as any).getAvailableModels === 'function') { | ||||
|                 return await (service as any).getAvailableModels(); | ||||
|             } | ||||
|              | ||||
|             // No fallback - return empty array if models can't be fetched | ||||
|             log.info('Ollama service does not support getAvailableModels method'); | ||||
|             return []; | ||||
|         } catch (error) { | ||||
|             log.error(`Error fetching Ollama models: ${error}`); | ||||
|             return []; | ||||
|         } | ||||
|     } | ||||
| } | ||||
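The three fetch helpers above differ only in the provider name; they could be expressed as one helper parameterized by provider, using the same duck-typed getAvailableModels() check. A sketch, assuming it lives in the same module so that log and the relative import path are available:

    // Sketch of a single parameterized helper mirroring the three methods above.
    type FetchableProvider = 'openai' | 'anthropic' | 'ollama';

    async function fetchProviderModels(provider: FetchableProvider): Promise<string[]> {
        try {
            const aiServiceManager = (await import('../../ai_service_manager.js')).default;
            const service = aiServiceManager.getInstance().getService(provider);

            if (service && typeof (service as any).getAvailableModels === 'function') {
                return await (service as any).getAvailableModels();
            }

            // No fallback: return an empty array if models can't be fetched
            log.info(`${provider} service does not support getAvailableModels method`);
            return [];
        } catch (error) {
            log.error(`Error fetching ${provider} models: ${error}`);
            return [];
        }
    }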
|   | ||||
| @@ -606,4 +606,12 @@ export class AnthropicService extends BaseAIService { | ||||
|  | ||||
|         return convertedTools; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Clear cached Anthropic client to force recreation with new settings | ||||
|      */ | ||||
|     clearCache(): void { | ||||
|         this.client = null; | ||||
|         log.info('Anthropic client cache cleared'); | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -526,4 +526,13 @@ export class OllamaService extends BaseAIService { | ||||
|         log.info(`Added tool execution feedback: ${toolExecutionStatus.length} statuses`); | ||||
|         return updatedMessages; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Clear cached Ollama client to force recreation with new settings | ||||
|      */ | ||||
|     clearCache(): void { | ||||
|         // Ollama service doesn't maintain a persistent client like OpenAI/Anthropic | ||||
|         // but we can clear any future cached state here if needed | ||||
|         log.info('Ollama client cache cleared (no persistent client to clear)'); | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -257,4 +257,12 @@ export class OpenAIService extends BaseAIService { | ||||
|             throw error; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Clear cached OpenAI client to force recreation with new settings | ||||
|      */ | ||||
|     clearCache(): void { | ||||
|         this.openai = null; | ||||
|         log.info('OpenAI client cache cleared'); | ||||
|     } | ||||
| } | ||||
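Each provider service now exposes clearCache() so a cached SDK client can be dropped when connection settings change and rebuilt with the new values on the next request. A hedged sketch of how a settings-change hook might use it; the option-to-provider mapping, function name, and import path are assumptions, not code from this commit:

    // Illustrative listener: clear the affected provider's client cache when a
    // connection-related option changes.
    const PROVIDER_BY_OPTION: Record<string, 'openai' | 'anthropic' | 'ollama'> = {
        openaiApiKey: 'openai',
        openaiBaseUrl: 'openai',
        anthropicApiKey: 'anthropic',
        anthropicBaseUrl: 'anthropic',
        ollamaBaseUrl: 'ollama'
    };

    async function onProviderOptionChanged(optionName: string): Promise<void> {
        const provider = PROVIDER_BY_OPTION[optionName];
        if (!provider) {
            return;
        }

        // Same accessor pattern as the model-fetch helpers in this commit
        const aiServiceManager = (await import('./ai_service_manager.js')).default;  // path assumed
        const service = aiServiceManager.getInstance().getService(provider);
        if (service && typeof (service as any).clearCache === 'function') {
            (service as any).clearCache();
        }
    }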
|   | ||||
| @@ -195,26 +195,26 @@ const defaultOptions: DefaultOption[] = [ | ||||
|     // AI Options | ||||
|     { name: "aiEnabled", value: "false", isSynced: true }, | ||||
|     { name: "openaiApiKey", value: "", isSynced: false }, | ||||
|     { name: "openaiDefaultModel", value: "gpt-4o", isSynced: true }, | ||||
|     { name: "openaiEmbeddingModel", value: "text-embedding-3-small", isSynced: true }, | ||||
|     { name: "openaiDefaultModel", value: "", isSynced: true }, | ||||
|     { name: "openaiEmbeddingModel", value: "", isSynced: true }, | ||||
|     { name: "openaiBaseUrl", value: "https://api.openai.com/v1", isSynced: true }, | ||||
|     { name: "anthropicApiKey", value: "", isSynced: false }, | ||||
|     { name: "anthropicDefaultModel", value: "claude-3-opus-20240229", isSynced: true }, | ||||
|     { name: "voyageEmbeddingModel", value: "voyage-2", isSynced: true }, | ||||
|     { name: "anthropicDefaultModel", value: "", isSynced: true }, | ||||
|     { name: "voyageEmbeddingModel", value: "", isSynced: true }, | ||||
|     { name: "voyageApiKey", value: "", isSynced: false }, | ||||
|     { name: "anthropicBaseUrl", value: "https://api.anthropic.com/v1", isSynced: true }, | ||||
|     { name: "ollamaEnabled", value: "false", isSynced: true }, | ||||
|     { name: "ollamaDefaultModel", value: "llama3", isSynced: true }, | ||||
|     { name: "ollamaDefaultModel", value: "", isSynced: true }, | ||||
|     { name: "ollamaBaseUrl", value: "http://localhost:11434", isSynced: true }, | ||||
|     { name: "ollamaEmbeddingModel", value: "nomic-embed-text", isSynced: true }, | ||||
|     { name: "ollamaEmbeddingModel", value: "", isSynced: true }, | ||||
|     { name: "embeddingAutoUpdateEnabled", value: "true", isSynced: true }, | ||||
|  | ||||
|     // Adding missing AI options | ||||
|     { name: "aiTemperature", value: "0.7", isSynced: true }, | ||||
|     { name: "aiSystemPrompt", value: "", isSynced: true }, | ||||
|     { name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true }, | ||||
|     { name: "aiSelectedProvider", value: "openai", isSynced: true }, | ||||
|     { name: "embeddingDimensionStrategy", value: "auto", isSynced: true }, | ||||
|     { name: "embeddingProviderPrecedence", value: "openai,voyage,ollama,local", isSynced: true }, | ||||
|     { name: "embeddingSelectedProvider", value: "openai", isSynced: true }, | ||||
|     { name: "embeddingSimilarityThreshold", value: "0.75", isSynced: true }, | ||||
|     { name: "enableAutomaticIndexing", value: "true", isSynced: true }, | ||||
|     { name: "maxNotesPerLlmQuery", value: "3", isSynced: true }, | ||||
|   | ||||
| @@ -142,15 +142,14 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi | ||||
|     ollamaDefaultModel: string; | ||||
|     ollamaEmbeddingModel: string; | ||||
|     codeOpenAiModel: string; | ||||
|     aiProviderPrecedence: string; | ||||
|     aiSelectedProvider: string; | ||||
|  | ||||
|     // Embedding-related options | ||||
|     embeddingAutoUpdateEnabled: boolean; | ||||
|     embeddingUpdateInterval: number; | ||||
|     embeddingBatchSize: number; | ||||
|     embeddingDefaultDimension: number; | ||||
|     embeddingsDefaultProvider: string; | ||||
|     embeddingProviderPrecedence: string; | ||||
|     embeddingSelectedProvider: string; | ||||
|     enableAutomaticIndexing: boolean; | ||||
|     embeddingGenerationLocation: string; | ||||
|     embeddingDimensionStrategy: string; | ||||
|   | ||||