Skip to content

Commit 901ed3b

Browse files
committed
fix(cache-with-ttl): cap in-memory memo layer with LRU eviction
`memoCache` was a plain `Map` with no size limit and no eviction. Every `get()` cache-hit, every `set()`, and every `getAll()` wrote to it unconditionally; expired entries were only reclaimed when the same key was read again. In a long-lived process (devserver, VS Code extension, daemon) that queries many distinct keys, this grows without bound — the docstring explicitly calls memoCache "in-memory memoization for hot data" but the implementation kept every key it had ever seen. Adds a `memoMaxSize` option (default 1000) and routes all memo writes through a `memoSet` helper that evicts the least-recently-used entry (oldest Map insertion) whenever inserting a new key would push the cache past the cap; re-inserting an existing key never evicts. Memo hits in `get()` re-insert to bump recency so hot keys survive churn. The persistent (cacache) layer is unaffected.
1 parent 0b85ec9 commit 901ed3b

1 file changed

Lines changed: 36 additions & 4 deletions

File tree

src/cache-with-ttl.ts

Lines changed: 36 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,13 @@ export interface TtlCacheOptions {
132132
* @default true
133133
*/
134134
memoize?: boolean | undefined
135+
/**
136+
* Maximum number of entries to keep in the in-memory memo cache. When
137+
* exceeded, the least-recently-used entry is evicted. The persistent
138+
* (cacache) layer is unaffected.
139+
* @default 1000
140+
*/
141+
memoMaxSize?: number | undefined
135142
/**
136143
* Custom cache key prefix.
137144
* Must not contain wildcards (*).
@@ -155,6 +162,11 @@ export interface TtlCacheOptions {
155162
// 5 minutes
156163
const DEFAULT_TTL_MS = 5 * 60 * 1000
157164
const DEFAULT_PREFIX = 'ttl-cache'
165+
// Cap the in-memory memoization layer. Without this, a long-running
166+
// daemon (devserver, editor extension) that queries many distinct keys
167+
// accumulates entries forever — expired entries are only reclaimed when
168+
// that exact key is read again. Cacache on disk is unaffected.
169+
const DEFAULT_MEMO_MAX_SIZE = 1000
158170

159171
/**
160172
* Create a TTL-based cache instance.
@@ -170,6 +182,7 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
170182
const opts = {
171183
__proto__: null,
172184
memoize: true,
185+
memoMaxSize: DEFAULT_MEMO_MAX_SIZE,
173186
prefix: DEFAULT_PREFIX,
174187
ttl: DEFAULT_TTL_MS,
175188
...options,
@@ -182,8 +195,25 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
182195
)
183196
}
184197

185-
// In-memory cache for hot data
198+
// In-memory cache for hot data. Capped via opts.memoMaxSize using a
199+
// Map's insertion-order semantics as the LRU list: `memoSet` deletes
200+
// the key first so a re-insert moves it to the tail, and when size
201+
// exceeds the cap we evict the oldest entry (first key in iteration).
186202
const memoCache = new Map<string, TtlCacheEntry<unknown>>()
203+
const memoMaxSize = Math.max(1, opts.memoMaxSize ?? DEFAULT_MEMO_MAX_SIZE)
204+
205+
function memoSet(fullKey: string, entry: TtlCacheEntry<unknown>): void {
206+
if (memoCache.has(fullKey)) {
207+
memoCache.delete(fullKey)
208+
} else if (memoCache.size >= memoMaxSize) {
209+
// Evict the least-recently-used entry (oldest insertion).
210+
const oldest = memoCache.keys().next().value
211+
if (oldest !== undefined) {
212+
memoCache.delete(oldest)
213+
}
214+
}
215+
memoCache.set(fullKey, entry)
216+
}
187217

188218
// Ensure ttl is defined
189219
const ttl = opts.ttl ?? DEFAULT_TTL_MS
@@ -248,6 +278,8 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
248278
if (opts.memoize) {
249279
const memoEntry = memoCache.get(fullKey)
250280
if (memoEntry && !isExpired(memoEntry)) {
281+
// Bump recency so the LRU eviction prefers colder entries.
282+
memoSet(fullKey, memoEntry)
251283
return memoEntry.data as T
252284
}
253285
// Remove expired memo entry.
@@ -274,7 +306,7 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
274306
if (!isExpired(entry)) {
275307
// Update in-memory cache.
276308
if (opts.memoize) {
277-
memoCache.set(fullKey, entry)
309+
memoSet(fullKey, entry)
278310
}
279311
return entry.data
280312
}
@@ -365,7 +397,7 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
365397

366398
// Update in-memory cache.
367399
if (opts.memoize) {
368-
memoCache.set(cacheEntry.key, parsed)
400+
memoSet(cacheEntry.key, parsed)
369401
}
370402
} catch {
371403
// Ignore parse errors or other issues.
@@ -395,7 +427,7 @@ export function createTtlCache(options?: TtlCacheOptions): TtlCache {
395427

396428
// Update in-memory cache first (synchronous and fast).
397429
if (opts.memoize) {
398-
memoCache.set(fullKey, entry)
430+
memoSet(fullKey, entry)
399431
}
400432

401433
// Update persistent cache (don't fail if this errors).

0 commit comments

Comments
 (0)