mirror of
				https://github.com/zadam/trilium.git
				synced 2025-10-31 02:16:05 +01:00 
			
		
		
		
	Adapt or regenerate embeddings — allows users to decide
This commit is contained in:
		| @@ -340,6 +340,15 @@ export default class AiSettingsWidget extends OptionsWidget { | |||||||
|                 <div class="form-text">${t("ai_llm.embedding_default_provider_description")}</div> |                 <div class="form-text">${t("ai_llm.embedding_default_provider_description")}</div> | ||||||
|             </div> |             </div> | ||||||
|  |  | ||||||
|  |             <div class="form-group"> | ||||||
|  |                 <label>${t("ai_llm.embedding_dimension_strategy")}</label> | ||||||
|  |                 <select class="embedding-dimension-strategy form-control"> | ||||||
|  |                     <option value="adapt">Adapt dimensions (faster)</option> | ||||||
|  |                     <option value="regenerate">Regenerate embeddings (more accurate)</option> | ||||||
|  |                 </select> | ||||||
|  |                 <div class="form-text">${t("ai_llm.embedding_dimension_strategy_description") || "Choose how to handle different embedding dimensions between providers. 'Adapt' is faster but less accurate, 'Regenerate' is more accurate but requires API calls."}</div> | ||||||
|  |             </div> | ||||||
|  |  | ||||||
|             <div class="form-group"> |             <div class="form-group"> | ||||||
|                 <label>${t("ai_llm.embedding_provider_precedence")}</label> |                 <label>${t("ai_llm.embedding_provider_precedence")}</label> | ||||||
|                 <input type="hidden" class="embedding-provider-precedence" value=""> |                 <input type="hidden" class="embedding-provider-precedence" value=""> | ||||||
| @@ -812,6 +821,11 @@ export default class AiSettingsWidget extends OptionsWidget { | |||||||
|             await this.displayValidationWarnings(); |             await this.displayValidationWarnings(); | ||||||
|         }); |         }); | ||||||
|  |  | ||||||
|  |         const $embeddingDimensionStrategy = this.$widget.find('.embedding-dimension-strategy'); | ||||||
|  |         $embeddingDimensionStrategy.on('change', async () => { | ||||||
|  |             await this.updateOption('embeddingDimensionStrategy', $embeddingDimensionStrategy.val() as string); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|         const $embeddingProviderPrecedence = this.$widget.find('.embedding-provider-precedence'); |         const $embeddingProviderPrecedence = this.$widget.find('.embedding-provider-precedence'); | ||||||
|         $embeddingProviderPrecedence.on('change', async () => { |         $embeddingProviderPrecedence.on('change', async () => { | ||||||
|             await this.updateOption('embeddingProviderPrecedence', $embeddingProviderPrecedence.val() as string); |             await this.updateOption('embeddingProviderPrecedence', $embeddingProviderPrecedence.val() as string); | ||||||
| @@ -1151,7 +1165,8 @@ export default class AiSettingsWidget extends OptionsWidget { | |||||||
|         this.$widget.find('.embedding-similarity-threshold').val(options.embeddingSimilarityThreshold || '0.65'); |         this.$widget.find('.embedding-similarity-threshold').val(options.embeddingSimilarityThreshold || '0.65'); | ||||||
|         this.$widget.find('.max-notes-per-llm-query').val(options.maxNotesPerLlmQuery || '10'); |         this.$widget.find('.max-notes-per-llm-query').val(options.maxNotesPerLlmQuery || '10'); | ||||||
|         this.$widget.find('.embedding-default-provider').val(options.embeddingsDefaultProvider || 'openai'); |         this.$widget.find('.embedding-default-provider').val(options.embeddingsDefaultProvider || 'openai'); | ||||||
|         this.$widget.find('.embedding-provider-precedence').val(options.embeddingProviderPrecedence || 'openai,ollama,anthropic'); |         this.$widget.find('.embedding-provider-precedence').val(options.embeddingProviderPrecedence || 'openai,ollama'); | ||||||
|  |         this.$widget.find('.embedding-dimension-strategy').val(options.embeddingDimensionStrategy || 'adapt'); | ||||||
|         this.$widget.find('.embedding-generation-location').val(options.embeddingGenerationLocation || 'client'); |         this.$widget.find('.embedding-generation-location').val(options.embeddingGenerationLocation || 'client'); | ||||||
|         this.$widget.find('.embedding-batch-size').val(options.embeddingBatchSize || '10'); |         this.$widget.find('.embedding-batch-size').val(options.embeddingBatchSize || '10'); | ||||||
|         this.$widget.find('.embedding-update-interval').val(options.embeddingUpdateInterval || '5000'); |         this.$widget.find('.embedding-update-interval').val(options.embeddingUpdateInterval || '5000'); | ||||||
|   | |||||||
| @@ -106,7 +106,8 @@ const ALLOWED_OPTIONS = new Set([ | |||||||
|     "embeddingSimilarityThreshold", |     "embeddingSimilarityThreshold", | ||||||
|     "maxNotesPerLlmQuery", |     "maxNotesPerLlmQuery", | ||||||
|     "enableAutomaticIndexing", |     "enableAutomaticIndexing", | ||||||
|     "embeddingGenerationLocation" |     "embeddingGenerationLocation", | ||||||
|  |     "embeddingDimensionStrategy" | ||||||
| ]); | ]); | ||||||
|  |  | ||||||
| function getOptions() { | function getOptions() { | ||||||
|   | |||||||
| @@ -165,18 +165,25 @@ export async function findSimilarNotes( | |||||||
|             log.info(`Available embeddings: ${JSON.stringify(availableEmbeddings.map(e => ({ |             log.info(`Available embeddings: ${JSON.stringify(availableEmbeddings.map(e => ({ | ||||||
|                 providerId: e.providerId, |                 providerId: e.providerId, | ||||||
|                 modelId: e.modelId, |                 modelId: e.modelId, | ||||||
|                 count: e.count |                 count: e.count, | ||||||
|  |                 dimension: e.dimension | ||||||
|             })))}`); |             })))}`); | ||||||
|  |  | ||||||
|             // Import the AIServiceManager to get provider precedence |             // Import the AIServiceManager to get provider precedence | ||||||
|             const { default: aiManager } = await import('../ai_service_manager.js'); |             const { default: aiManager } = await import('../ai_service_manager.js'); | ||||||
|  |  | ||||||
|  |             // Import vector utils for dimension adaptation | ||||||
|  |             const { adaptEmbeddingDimensions } = await import('./vector_utils.js'); | ||||||
|  |  | ||||||
|  |             // Get user dimension strategy preference | ||||||
|  |             const options = (await import('../../options.js')).default; | ||||||
|  |             const dimensionStrategy = await options.getOption('embeddingDimensionStrategy') || 'adapt'; | ||||||
|  |             log.info(`Using embedding dimension strategy: ${dimensionStrategy}`); | ||||||
|  |  | ||||||
|             // Get providers in user-defined precedence order |             // Get providers in user-defined precedence order | ||||||
|             // This uses the internal providerOrder property that's set from user preferences |  | ||||||
|             const availableProviderIds = availableEmbeddings.map(e => e.providerId); |             const availableProviderIds = availableEmbeddings.map(e => e.providerId); | ||||||
|  |  | ||||||
|             // Get dedicated embedding provider precedence from options |             // Get dedicated embedding provider precedence from options | ||||||
|             const options = (await import('../../options.js')).default; |  | ||||||
|             let preferredProviders: string[] = []; |             let preferredProviders: string[] = []; | ||||||
|  |  | ||||||
|             const embeddingPrecedence = await options.getOption('embeddingProviderPrecedence'); |             const embeddingPrecedence = await options.getOption('embeddingProviderPrecedence'); | ||||||
| @@ -215,53 +222,54 @@ export async function findSimilarNotes( | |||||||
|                 const providerEmbeddings = availableEmbeddings.filter(e => e.providerId === provider); |                 const providerEmbeddings = availableEmbeddings.filter(e => e.providerId === provider); | ||||||
|  |  | ||||||
|                 if (providerEmbeddings.length > 0) { |                 if (providerEmbeddings.length > 0) { | ||||||
|                     // Find models that match the current embedding's dimensions |                     // Use the model with the most embeddings | ||||||
|                     const dimensionMatchingModels = providerEmbeddings.filter(e => e.dimension === embedding.length); |                     const bestModel = providerEmbeddings.sort((a, b) => b.count - a.count)[0]; | ||||||
|  |                     log.info(`Found fallback provider: ${provider}, model: ${bestModel.modelId}, dimension: ${bestModel.dimension}`); | ||||||
|  |  | ||||||
|                     // If we have models with matching dimensions, use the one with most embeddings |                     if (dimensionStrategy === 'adapt') { | ||||||
|                     if (dimensionMatchingModels.length > 0) { |                         // Dimension adaptation strategy (simple truncation/padding) | ||||||
|                         const bestModel = dimensionMatchingModels.sort((a, b) => b.count - a.count)[0]; |                         const adaptedEmbedding = adaptEmbeddingDimensions(embedding, bestModel.dimension); | ||||||
|                         log.info(`Found fallback provider with matching dimensions (${embedding.length}): ${provider}, model: ${bestModel.modelId}`); |                         log.info(`Adapted query embedding from dimension ${embedding.length} to ${adaptedEmbedding.length}`); | ||||||
|  |  | ||||||
|                         // Recursive call with the new provider/model, but disable further fallbacks |                         // Use the adapted embedding with the fallback provider | ||||||
|                         return findSimilarNotes( |                         return findSimilarNotes( | ||||||
|                             embedding, |                             adaptedEmbedding, | ||||||
|                             provider, |                             provider, | ||||||
|                             bestModel.modelId, |                             bestModel.modelId, | ||||||
|                             limit, |                             limit, | ||||||
|                             threshold, |                             threshold, | ||||||
|                             false // Prevent infinite recursion |                             false // Prevent infinite recursion | ||||||
|                         ); |                         ); | ||||||
|                     } else { |                     } | ||||||
|                         // We need to regenerate embeddings with the new provider |                     else if (dimensionStrategy === 'regenerate') { | ||||||
|                         log.info(`No models with matching dimensions found for ${provider}. Available models: ${JSON.stringify( |                         // Regeneration strategy (regenerate embedding with fallback provider) | ||||||
|                             providerEmbeddings.map(e => ({ model: e.modelId, dimension: e.dimension })) |  | ||||||
|                         )}`); |  | ||||||
|  |  | ||||||
|                         try { |                         try { | ||||||
|                             // Import provider manager to get a provider instance |                             // Import provider manager to get a provider instance | ||||||
|                             const { default: providerManager } = await import('./providers.js'); |                             const { default: providerManager } = await import('./providers.js'); | ||||||
|                             const providerInstance = providerManager.getEmbeddingProvider(provider); |                             const providerInstance = providerManager.getEmbeddingProvider(provider); | ||||||
|  |  | ||||||
|                             if (providerInstance) { |                             if (providerInstance) { | ||||||
|                                 // Use the model with the most embeddings |                                 // Try to get the original query text | ||||||
|                                 const bestModel = providerEmbeddings.sort((a, b) => b.count - a.count)[0]; |                                 // This is a challenge - ideally we would have the original query | ||||||
|                                 // Configure the model by setting it in the config |                                 // For now, we'll use a global cache to store recent queries | ||||||
|                                 try { |                                 interface CustomGlobal { | ||||||
|                                     // Access the config safely through the getConfig method |                                     recentEmbeddingQueries?: Record<string, string>; | ||||||
|  |                                 } | ||||||
|  |                                 const globalWithCache = global as unknown as CustomGlobal; | ||||||
|  |                                 const recentQueries = globalWithCache.recentEmbeddingQueries || {}; | ||||||
|  |                                 const embeddingKey = embedding.toString().substring(0, 100); | ||||||
|  |                                 const originalQuery = recentQueries[embeddingKey]; | ||||||
|  |  | ||||||
|  |                                 if (originalQuery) { | ||||||
|  |                                     log.info(`Found original query "${originalQuery}" for regeneration with ${provider}`); | ||||||
|  |  | ||||||
|  |                                     // Configure the model | ||||||
|                                     const config = providerInstance.getConfig(); |                                     const config = providerInstance.getConfig(); | ||||||
|                                     config.model = bestModel.modelId; |                                     config.model = bestModel.modelId; | ||||||
|  |  | ||||||
|                                     log.info(`Trying to convert query to ${provider}/${bestModel.modelId} embedding format (dimension: ${bestModel.dimension})`); |  | ||||||
|  |  | ||||||
|                                     // Get the original query from the embedding cache if possible, or use a placeholder |  | ||||||
|                                     // This is a hack - ideally we'd pass the query text through the whole chain |  | ||||||
|                                     const originalQuery = "query"; // This is a placeholder, we'd need the original query text |  | ||||||
|  |  | ||||||
|                                     // Generate a new embedding with the fallback provider |                                     // Generate a new embedding with the fallback provider | ||||||
|                                     const newEmbedding = await providerInstance.generateEmbeddings(originalQuery); |                                     const newEmbedding = await providerInstance.generateEmbeddings(originalQuery); | ||||||
|  |                                     log.info(`Successfully regenerated embedding with provider ${provider}/${bestModel.modelId} (dimension: ${newEmbedding.length})`); | ||||||
|                                     log.info(`Successfully generated new embedding with provider ${provider}/${bestModel.modelId} (dimension: ${newEmbedding.length})`); |  | ||||||
|  |  | ||||||
|                                     // Now try finding similar notes with the new embedding |                                     // Now try finding similar notes with the new embedding | ||||||
|                                     return findSimilarNotes( |                                     return findSimilarNotes( | ||||||
| @@ -272,18 +280,38 @@ export async function findSimilarNotes( | |||||||
|                                         threshold, |                                         threshold, | ||||||
|                                         false // Prevent infinite recursion |                                         false // Prevent infinite recursion | ||||||
|                                     ); |                                     ); | ||||||
|                                 } catch (configErr: any) { |                                 } else { | ||||||
|                                     log.error(`Error configuring provider ${provider}: ${configErr.message}`); |                                     log.info(`Original query not found for regeneration, falling back to adaptation`); | ||||||
|  |                                     // Fall back to adaptation if we can't find the original query | ||||||
|  |                                     const adaptedEmbedding = adaptEmbeddingDimensions(embedding, bestModel.dimension); | ||||||
|  |                                     return findSimilarNotes( | ||||||
|  |                                         adaptedEmbedding, | ||||||
|  |                                         provider, | ||||||
|  |                                         bestModel.modelId, | ||||||
|  |                                         limit, | ||||||
|  |                                         threshold, | ||||||
|  |                                         false | ||||||
|  |                                     ); | ||||||
|                                 } |                                 } | ||||||
|                             } |                             } | ||||||
|                         } catch (err: any) { |                         } catch (err: any) { | ||||||
|                             log.error(`Error converting embedding format: ${err.message}`); |                             log.error(`Error regenerating embedding: ${err.message}`); | ||||||
|  |                             // Fall back to adaptation on error | ||||||
|  |                             const adaptedEmbedding = adaptEmbeddingDimensions(embedding, bestModel.dimension); | ||||||
|  |                             return findSimilarNotes( | ||||||
|  |                                 adaptedEmbedding, | ||||||
|  |                                 provider, | ||||||
|  |                                 bestModel.modelId, | ||||||
|  |                                 limit, | ||||||
|  |                                 threshold, | ||||||
|  |                                 false | ||||||
|  |                             ); | ||||||
|                         } |                         } | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|  |  | ||||||
|             log.error(`No suitable fallback providers found with compatible dimensions. Current embedding dimension: ${embedding.length}`); |             log.error(`No suitable fallback providers found. Current embedding dimension: ${embedding.length}`); | ||||||
|             log.info(`Available embeddings: ${JSON.stringify(availableEmbeddings.map(e => ({ |             log.info(`Available embeddings: ${JSON.stringify(availableEmbeddings.map(e => ({ | ||||||
|                 providerId: e.providerId, |                 providerId: e.providerId, | ||||||
|                 modelId: e.modelId, |                 modelId: e.modelId, | ||||||
| @@ -307,13 +335,8 @@ export async function findSimilarNotes( | |||||||
|         const rowData = row as any; |         const rowData = row as any; | ||||||
|         const rowEmbedding = bufferToEmbedding(rowData.embedding, rowData.dimension); |         const rowEmbedding = bufferToEmbedding(rowData.embedding, rowData.dimension); | ||||||
|  |  | ||||||
|         // Check if dimensions match before calculating similarity |  | ||||||
|         if (rowEmbedding.length !== embedding.length) { |  | ||||||
|             log.info(`Skipping embedding ${rowData.embedId} - dimension mismatch: ${rowEmbedding.length} vs ${embedding.length}`); |  | ||||||
|             continue; |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         try { |         try { | ||||||
|  |             // cosineSimilarity will automatically adapt dimensions if needed | ||||||
|             const similarity = cosineSimilarity(embedding, rowEmbedding); |             const similarity = cosineSimilarity(embedding, rowEmbedding); | ||||||
|             similarities.push({ |             similarities.push({ | ||||||
|                 noteId: rowData.noteId, |                 noteId: rowData.noteId, | ||||||
|   | |||||||
| @@ -1,9 +1,11 @@ | |||||||
| /** | /** | ||||||
|  * Computes the cosine similarity between two vectors |  * Computes the cosine similarity between two vectors | ||||||
|  |  * If dimensions don't match, automatically adapts the first vector to match the second | ||||||
|  */ |  */ | ||||||
| export function cosineSimilarity(a: Float32Array, b: Float32Array): number { | export function cosineSimilarity(a: Float32Array, b: Float32Array): number { | ||||||
|  |     // If dimensions don't match, adapt 'a' to match 'b' | ||||||
|     if (a.length !== b.length) { |     if (a.length !== b.length) { | ||||||
|         throw new Error(`Vector dimensions don't match: ${a.length} vs ${b.length}`); |         a = adaptEmbeddingDimensions(a, b.length); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     let dotProduct = 0; |     let dotProduct = 0; | ||||||
| @@ -26,6 +28,52 @@ export function cosineSimilarity(a: Float32Array, b: Float32Array): number { | |||||||
|     return dotProduct / (aMagnitude * bMagnitude); |     return dotProduct / (aMagnitude * bMagnitude); | ||||||
| } | } | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * Adapts an embedding to match target dimensions | ||||||
|  |  * Uses a simple truncation (if source is larger) or zero-padding (if source is smaller) | ||||||
|  |  * | ||||||
|  |  * @param sourceEmbedding The original embedding | ||||||
|  |  * @param targetDimension The desired dimension | ||||||
|  |  * @returns A new embedding with the target dimensions | ||||||
|  |  */ | ||||||
|  | export function adaptEmbeddingDimensions(sourceEmbedding: Float32Array, targetDimension: number): Float32Array { | ||||||
|  |     const sourceDimension = sourceEmbedding.length; | ||||||
|  |  | ||||||
|  |     // If dimensions already match, return the original | ||||||
|  |     if (sourceDimension === targetDimension) { | ||||||
|  |         return sourceEmbedding; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Create a new embedding with target dimensions | ||||||
|  |     const adaptedEmbedding = new Float32Array(targetDimension); | ||||||
|  |  | ||||||
|  |     if (sourceDimension < targetDimension) { | ||||||
|  |         // If source is smaller, copy all values and pad with zeros | ||||||
|  |         adaptedEmbedding.set(sourceEmbedding); | ||||||
|  |         // Rest of the array is already initialized to zeros | ||||||
|  |     } else { | ||||||
|  |         // If source is larger, truncate to target dimension | ||||||
|  |         for (let i = 0; i < targetDimension; i++) { | ||||||
|  |             adaptedEmbedding[i] = sourceEmbedding[i]; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Normalize the adapted embedding to maintain unit length | ||||||
|  |     let magnitude = 0; | ||||||
|  |     for (let i = 0; i < targetDimension; i++) { | ||||||
|  |         magnitude += adaptedEmbedding[i] * adaptedEmbedding[i]; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     magnitude = Math.sqrt(magnitude); | ||||||
|  |     if (magnitude > 0) { | ||||||
|  |         for (let i = 0; i < targetDimension; i++) { | ||||||
|  |             adaptedEmbedding[i] /= magnitude; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return adaptedEmbedding; | ||||||
|  | } | ||||||
|  |  | ||||||
| /** | /** | ||||||
|  * Converts embedding Float32Array to Buffer for storage in SQLite |  * Converts embedding Float32Array to Buffer for storage in SQLite | ||||||
|  */ |  */ | ||||||
|   | |||||||
| @@ -543,6 +543,27 @@ class IndexService { | |||||||
|             const embedding = await provider.generateEmbeddings(query); |             const embedding = await provider.generateEmbeddings(query); | ||||||
|             log.info(`Generated embedding for query: "${query}" (${embedding.length} dimensions)`); |             log.info(`Generated embedding for query: "${query}" (${embedding.length} dimensions)`); | ||||||
|  |  | ||||||
|  |             // Store query text in a global cache for possible regeneration with different providers | ||||||
|  |             // Use a type declaration to avoid TypeScript errors | ||||||
|  |             interface CustomGlobal { | ||||||
|  |                 recentEmbeddingQueries?: Record<string, string>; | ||||||
|  |             } | ||||||
|  |             const globalWithCache = global as unknown as CustomGlobal; | ||||||
|  |  | ||||||
|  |             if (!globalWithCache.recentEmbeddingQueries) { | ||||||
|  |                 globalWithCache.recentEmbeddingQueries = {}; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             // Use a substring of the embedding as a key (full embedding is too large) | ||||||
|  |             const embeddingKey = embedding.toString().substring(0, 100); | ||||||
|  |             globalWithCache.recentEmbeddingQueries[embeddingKey] = query; | ||||||
|  |  | ||||||
|  |             // Limit cache size to prevent memory leaks (keep max 50 recent queries) | ||||||
|  |             const keys = Object.keys(globalWithCache.recentEmbeddingQueries); | ||||||
|  |             if (keys.length > 50) { | ||||||
|  |                 delete globalWithCache.recentEmbeddingQueries[keys[0]]; | ||||||
|  |             } | ||||||
|  |  | ||||||
|             // Get Note IDs to search, optionally filtered by branch |             // Get Note IDs to search, optionally filtered by branch | ||||||
|             let similarNotes = []; |             let similarNotes = []; | ||||||
|  |  | ||||||
|   | |||||||
| @@ -189,7 +189,8 @@ const defaultOptions: DefaultOption[] = [ | |||||||
|     { name: "aiSystemPrompt", value: "", isSynced: true }, |     { name: "aiSystemPrompt", value: "", isSynced: true }, | ||||||
|     { name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true }, |     { name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true }, | ||||||
|     { name: "embeddingsDefaultProvider", value: "openai", isSynced: true }, |     { name: "embeddingsDefaultProvider", value: "openai", isSynced: true }, | ||||||
|     { name: "embeddingProviderPrecedence", value: "openai,ollama,anthropic", isSynced: true }, |     { name: "embeddingProviderPrecedence", value: "openai,ollama", isSynced: true }, | ||||||
|  |     { name: "embeddingDimensionStrategy", value: "adapt", isSynced: true }, | ||||||
|     { name: "enableAutomaticIndexing", value: "true", isSynced: true }, |     { name: "enableAutomaticIndexing", value: "true", isSynced: true }, | ||||||
|     { name: "embeddingSimilarityThreshold", value: "0.65", isSynced: true }, |     { name: "embeddingSimilarityThreshold", value: "0.65", isSynced: true }, | ||||||
|     { name: "maxNotesPerLlmQuery", value: "10", isSynced: true }, |     { name: "maxNotesPerLlmQuery", value: "10", isSynced: true }, | ||||||
|   | |||||||
| @@ -77,6 +77,7 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi | |||||||
|     embeddingSimilarityThreshold: string; |     embeddingSimilarityThreshold: string; | ||||||
|     maxNotesPerLlmQuery: string; |     maxNotesPerLlmQuery: string; | ||||||
|     embeddingGenerationLocation: string; |     embeddingGenerationLocation: string; | ||||||
|  |     embeddingDimensionStrategy: string; // 'adapt' or 'regenerate' | ||||||
|  |  | ||||||
|     lastSyncedPull: number; |     lastSyncedPull: number; | ||||||
|     lastSyncedPush: number; |     lastSyncedPush: number; | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user