forked from managedcode/graphrag
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathTokenTextChunker.cs
More file actions
84 lines (67 loc) · 2.61 KB
/
TokenTextChunker.cs
File metadata and controls
84 lines (67 loc) · 2.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
using System.Buffers;
using System.Runtime.InteropServices;
using GraphRag.Config;
using GraphRag.Tokenization;
namespace GraphRag.Chunking;
/// <summary>
/// Splits input slices into overlapping, token-bounded chunks using the tokenizer
/// selected by <c>config.EncodingModel</c>.
/// </summary>
public sealed class TokenTextChunker : ITextChunker
{
    /// <summary>
    /// Tokenizes all slices into one contiguous token stream and windows it into chunks
    /// of up to <c>config.Size</c> tokens, with consecutive chunks sharing
    /// <c>config.Overlap</c> tokens.
    /// </summary>
    /// <param name="slices">Source slices; each contributes its tokens in order.</param>
    /// <param name="config">Supplies chunk size, overlap, and the encoding model.</param>
    /// <returns>
    /// Chunks carrying the decoded text, the distinct document ids the chunk spans,
    /// and the token count; an empty list when there is no input or no tokens.
    /// </returns>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="slices"/> or <paramref name="config"/> is null.
    /// </exception>
    public IReadOnlyList<TextChunk> Chunk(IReadOnlyList<ChunkSlice> slices, ChunkingConfig config)
    {
        ArgumentNullException.ThrowIfNull(slices);
        ArgumentNullException.ThrowIfNull(config);

        if (slices.Count == 0)
        {
            return [];
        }

        var tokenizer = TokenizerRegistry.GetTokenizer(config.EncodingModel);

        // Flatten every slice into a single token stream, remembering which slice each
        // token came from so each chunk can report the document ids it spans.
        var flattened = new List<(int SliceIndex, int Token)>();
        for (var index = 0; index < slices.Count; index++)
        {
            var encoded = tokenizer.EncodeToIds(slices[index].Text.AsSpan());
            for (var i = 0; i < encoded.Count; i++)
            {
                flattened.Add((index, encoded[i]));
            }
        }

        if (flattened.Count == 0)
        {
            return [];
        }

        // Clamp to a sane window: size >= 1 and overlap < size, which guarantees the
        // window always advances by at least one token.
        var chunkSize = Math.Max(1, config.Size);
        var overlap = Math.Clamp(config.Overlap, 0, chunkSize - 1);
        var step = chunkSize - overlap; // always >= 1 because overlap <= chunkSize - 1

        var estimatedChunks = (flattened.Count + step - 1) / step;
        var results = new List<TextChunk>(estimatedChunks);
        var documentIds = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        var start = 0;
        while (start < flattened.Count)
        {
            var end = Math.Min(flattened.Count, start + chunkSize);
            var chunkTokens = CollectionsMarshal.AsSpan(flattened).Slice(start, end - start);

            var tokenValues = ArrayPool<int>.Shared.Rent(chunkTokens.Length);
            try
            {
                documentIds.Clear();
                var lastSliceIndex = -1;
                for (var i = 0; i < chunkTokens.Length; i++)
                {
                    var sliceIndex = chunkTokens[i].SliceIndex;
                    tokenValues[i] = chunkTokens[i].Token;

                    // Tokens from the same slice are contiguous in the flattened stream,
                    // so the document id only needs to be looked up on a slice change.
                    if (sliceIndex != lastSliceIndex)
                    {
                        documentIds.Add(slices[sliceIndex].DocumentId);
                        lastSliceIndex = sliceIndex;
                    }
                }

                var decoded = tokenizer.Decode(new ArraySegment<int>(tokenValues, 0, chunkTokens.Length));
                results.Add(new TextChunk(documentIds.ToList(), decoded, chunkTokens.Length));
            }
            finally
            {
                // Return the buffer even when Decode throws so the pooled array is never leaked.
                ArrayPool<int>.Shared.Return(tokenValues);
            }

            if (end >= flattened.Count)
            {
                break;
            }

            start += step; // step >= 1, so forward progress is guaranteed
        }

        return results;
    }
}