-
Notifications
You must be signed in to change notification settings - Fork 9
Expand file tree
/
Copy pathtypes.ts
More file actions
54 lines (48 loc) · 1.67 KB
/
types.ts
File metadata and controls
54 lines (48 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
/**
 * Contract implemented by every embedding backend (local transformers,
 * Ollama, OpenAI-compatible, or custom).
 *
 * Implementations turn text into fixed-length numeric vectors. Callers
 * should await initialize() before embedding; isReady() reports whether
 * setup has completed.
 */
export interface EmbeddingProvider {
  /** Provider identifier (used for selection/logging — see EmbeddingConfig.provider). */
  readonly name: string;
  /** Name of the underlying embedding model. */
  readonly modelName: string;
  /** Length of each vector produced by embed()/embedBatch(). */
  readonly dimensions: number;
  /** Performs async setup (e.g. model load or connection check — varies by implementation). */
  initialize(): Promise<void>;
  /** Embeds a single text; resolves to a vector of `dimensions` numbers. */
  embed(text: string): Promise<number[]>;
  /** Embeds many texts; resolves to one vector per input (presumably index-aligned — confirm in implementations). */
  embedBatch(texts: string[]): Promise<number[][]>;
  /** True once the provider can service embed calls. */
  isReady(): boolean;
}
/**
 * Configuration for selecting and tuning an embedding backend.
 * All fields except `provider` are optional; defaults come from
 * DEFAULT_EMBEDDING_CONFIG.
 */
export interface EmbeddingConfig {
  /** Which embedding backend to use. */
  provider: 'transformers' | 'ollama' | 'openai' | 'custom';
  /** Model identifier handed to the provider (defaults to DEFAULT_MODEL). */
  model?: string;
  /** Batch size for embedBatch-style calls (default 32). */
  batchSize?: number;
  /** Maximum retry attempts (default 3; retry semantics live in the providers). */
  maxRetries?: number;
  /** API key for remote providers (defaulted from OPENAI_API_KEY). */
  apiKey?: string;
  /** Base URL for HTTP providers (Ollama host or OpenAI-compatible endpoint). */
  apiEndpoint?: string;
}
/**
 * Validates an arbitrary value (typically an environment variable) as an
 * embedding provider name.
 *
 * @param value - Candidate of unknown origin and type.
 * @returns The value narrowed to a known provider name, or undefined if it
 *   is not one of the recognized literals (comparison is case-sensitive).
 */
export function parseEmbeddingProviderName(
  value: unknown
): EmbeddingConfig['provider'] | undefined {
  // A switch over the literal cases lets TypeScript narrow `value`
  // to the matching string-literal type in each branch.
  switch (value) {
    case 'transformers':
    case 'ollama':
    case 'openai':
    case 'custom':
      return value;
    default:
      return undefined;
  }
}
// Model used when no EMBEDDING_MODEL override is present.
// Default: bge-small — fast (~2min indexing) and safe on consumer hardware.
// Opt-in: EMBEDDING_MODEL=onnx-community/granite-embedding-small-english-r2-ONNX
// trades 5-10x slower indexing and higher RAM for better conceptual search.
// NOTE: an empty-string env var deliberately falls through to the default.
const embeddingModelOverride = process.env.EMBEDDING_MODEL;
export const DEFAULT_MODEL = embeddingModelOverride
  ? embeddingModelOverride
  : 'Xenova/bge-small-en-v1.5';
/**
 * Resolves the default API endpoint for a provider.
 *
 * Environment overrides win over the built-in defaults (OLLAMA_HOST for
 * ollama, OPENAI_BASE_URL for openai); providers with no HTTP endpoint
 * (transformers, custom) yield undefined. An empty-string env var falls
 * through to the built-in default.
 *
 * @param provider - The configured embedding backend.
 * @returns Base URL for HTTP providers, otherwise undefined.
 */
function getDefaultApiEndpoint(provider: EmbeddingConfig['provider']): string | undefined {
  switch (provider) {
    case 'ollama':
      return process.env.OLLAMA_HOST || 'http://localhost:11434';
    case 'openai':
      return process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
    default:
      return undefined;
  }
}
/**
 * Fallback embedding configuration assembled from environment variables.
 *
 * `apiEndpoint` is an accessor, not a plain property: it is re-derived from
 * the object's current `provider` on every read, so a later mutation of
 * `provider` is reflected in the endpoint. Note that spreading this object
 * (`{...DEFAULT_EMBEDDING_CONFIG}`) snapshots the getter's current value.
 */
export const DEFAULT_EMBEDDING_CONFIG: EmbeddingConfig = {
  // EMBEDDING_PROVIDER env var if it names a known provider; else local transformers.
  provider: parseEmbeddingProviderName(process.env.EMBEDDING_PROVIDER) ?? 'transformers',
  model: DEFAULT_MODEL,
  batchSize: 32,
  maxRetries: 3,
  // Read unconditionally even for non-openai providers; undefined unless
  // OPENAI_API_KEY is set (presumably only the openai provider consumes it — confirm).
  apiKey: process.env.OPENAI_API_KEY,
  // Lazy: `this` is the config object itself, so the endpoint tracks `provider`.
  get apiEndpoint() {
    return getDefaultApiEndpoint(this.provider);
  }
};