// WIP: tiered-storage library for Node.js (TypeScript) — cold objects in S3,
// warm objects on disk, and hot objects in memory.
1import { lru } from 'tiny-lru';
2import type { StorageTier, StorageMetadata, TierStats, TierGetResult } from '../types/index.js';
3
/** Internal cache record: payload bytes, associated metadata, and cached byte length. */
interface CacheEntry {
  data: Uint8Array;
  metadata: StorageMetadata;
  // Byte length of `data`, captured at insert time so eviction accounting
  // does not have to re-read `data.byteLength`.
  size: number;
}
9
/**
 * Configuration for MemoryStorageTier.
 */
export interface MemoryStorageTierConfig {
  /**
   * Maximum total size in bytes.
   *
   * @remarks
   * When this limit is reached, least-recently-used entries are evicted.
   * Must be positive; the constructor throws otherwise.
   */
  maxSizeBytes: number;

  /**
   * Maximum number of items.
   *
   * @remarks
   * When this limit is reached, least-recently-used entries are evicted.
   * Useful for limiting memory usage when items have variable sizes.
   * Must be positive when provided; defaults to 10,000 when omitted.
   */
  maxItems?: number;
}
31
32/**
33 * In-memory storage tier using TinyLRU for LRU eviction.
34 *
35 * @remarks
36 * - Uses the battle-tested TinyLRU library for efficient LRU caching
37 * - Automatically evicts least-recently-used entries when limits are reached
38 * - Not distributed - single process only
39 * - Data is lost on process restart (use warm/cold tiers for persistence)
40 * - Implements both size-based and count-based eviction
41 *
42 * @example
43 * ```typescript
44 * const tier = new MemoryStorageTier({
45 * maxSizeBytes: 100 * 1024 * 1024, // 100MB
46 * maxItems: 1000,
47 * });
48 *
49 * await tier.set('key', data, metadata);
50 * const retrieved = await tier.get('key');
51 * ```
52 */
53export class MemoryStorageTier implements StorageTier {
54 private cache: ReturnType<typeof lru<CacheEntry>>;
55 private currentSize = 0;
56 private stats = {
57 hits: 0,
58 misses: 0,
59 evictions: 0,
60 };
61
62 constructor(private config: MemoryStorageTierConfig) {
63 if (config.maxSizeBytes <= 0) {
64 throw new Error('maxSizeBytes must be positive');
65 }
66 if (config.maxItems !== undefined && config.maxItems <= 0) {
67 throw new Error('maxItems must be positive');
68 }
69
70 // Initialize TinyLRU with max items (we'll handle size limits separately)
71 const maxItems = config.maxItems ?? 10000; // Default to 10k items if not specified
72 this.cache = lru<CacheEntry>(maxItems);
73 }
74
75 async get(key: string): Promise<Uint8Array | null> {
76 const entry = this.cache.get(key);
77
78 if (!entry) {
79 this.stats.misses++;
80 return null;
81 }
82
83 this.stats.hits++;
84 return entry.data;
85 }
86
87 /**
88 * Retrieve data and metadata together in a single cache lookup.
89 *
90 * @param key - The key to retrieve
91 * @returns The data and metadata, or null if not found
92 */
93 async getWithMetadata(key: string): Promise<TierGetResult | null> {
94 const entry = this.cache.get(key);
95
96 if (!entry) {
97 this.stats.misses++;
98 return null;
99 }
100
101 this.stats.hits++;
102 return { data: entry.data, metadata: entry.metadata };
103 }
104
105 async set(key: string, data: Uint8Array, metadata: StorageMetadata): Promise<void> {
106 const size = data.byteLength;
107
108 // Check existing entry for size accounting
109 const existing = this.cache.get(key);
110 if (existing) {
111 this.currentSize -= existing.size;
112 }
113
114 // Evict entries until we have space for the new entry
115 await this.evictIfNeeded(size);
116
117 // Add new entry
118 const entry: CacheEntry = { data, metadata, size };
119 this.cache.set(key, entry);
120 this.currentSize += size;
121 }
122
123 async delete(key: string): Promise<void> {
124 const entry = this.cache.get(key);
125 if (entry) {
126 this.cache.delete(key);
127 this.currentSize -= entry.size;
128 }
129 }
130
131 async exists(key: string): Promise<boolean> {
132 return this.cache.has(key);
133 }
134
135 async *listKeys(prefix?: string): AsyncIterableIterator<string> {
136 // TinyLRU doesn't expose keys(), so we need to track them separately
137 // For now, we'll use the cache's internal structure
138 const keys = this.cache.keys();
139 for (const key of keys) {
140 if (!prefix || key.startsWith(prefix)) {
141 yield key;
142 }
143 }
144 }
145
146 async deleteMany(keys: string[]): Promise<void> {
147 for (const key of keys) {
148 await this.delete(key);
149 }
150 }
151
152 async getMetadata(key: string): Promise<StorageMetadata | null> {
153 const entry = this.cache.get(key);
154 return entry ? entry.metadata : null;
155 }
156
157 async setMetadata(key: string, metadata: StorageMetadata): Promise<void> {
158 const entry = this.cache.get(key);
159 if (entry) {
160 // Update metadata in place
161 entry.metadata = metadata;
162 // Re-set to mark as recently used
163 this.cache.set(key, entry);
164 }
165 }
166
167 async getStats(): Promise<TierStats> {
168 return {
169 bytes: this.currentSize,
170 items: this.cache.size,
171 hits: this.stats.hits,
172 misses: this.stats.misses,
173 evictions: this.stats.evictions,
174 };
175 }
176
177 async clear(): Promise<void> {
178 this.cache.clear();
179 this.currentSize = 0;
180 }
181
182 /**
183 * Evict least-recently-used entries until there's space for new data.
184 *
185 * @param incomingSize - Size of data being added
186 *
187 * @remarks
188 * TinyLRU handles count-based eviction automatically.
189 * This method handles size-based eviction by using TinyLRU's built-in evict() method,
190 * which properly removes the LRU item without updating access order.
191 */
192 private async evictIfNeeded(incomingSize: number): Promise<void> {
193 // Keep evicting until we have enough space
194 while (this.currentSize + incomingSize > this.config.maxSizeBytes && this.cache.size > 0) {
195 // Get the LRU key (first in the list) without accessing it
196 const keys = this.cache.keys();
197 if (keys.length === 0) break;
198
199 const lruKey = keys[0];
200 if (!lruKey) break;
201
202 // Access the entry directly from internal items without triggering LRU update
203 // TinyLRU exposes items as a public property for this purpose
204 const entry = (this.cache as any).items[lruKey] as CacheEntry | undefined;
205 if (!entry) break;
206
207 // Use TinyLRU's built-in evict() which properly removes the LRU item
208 this.cache.evict();
209 this.currentSize -= entry.size;
210 this.stats.evictions++;
211 }
212 }
213}