Work-in-progress tiered-storage library for Node.js, written in TypeScript: hot objects are cached in memory, warm objects are kept on local disk, and cold objects are stored in S3.
1import { lru } from 'tiny-lru';
2import type { StorageTier, StorageMetadata, TierStats } from '../types/index.js';
3
/**
 * Internal cache record pairing the stored bytes with their metadata.
 */
interface CacheEntry {
  // Raw object bytes held in memory.
  data: Uint8Array;
  // Caller-supplied metadata stored alongside the bytes.
  metadata: StorageMetadata;
  // Byte length of `data`, cached so size accounting does not re-read byteLength.
  size: number;
}
9
/**
 * Configuration for MemoryStorageTier.
 */
export interface MemoryStorageTierConfig {
  /**
   * Maximum total size in bytes.
   *
   * @remarks
   * When this limit is reached, least-recently-used entries are evicted.
   */
  maxSizeBytes: number;

  /**
   * Maximum number of items.
   *
   * @remarks
   * When this limit is reached, least-recently-used entries are evicted.
   * Useful for limiting memory usage when items have variable sizes.
   *
   * @defaultValue 10000 — when omitted, the tier falls back to a 10,000-item cap.
   */
  maxItems?: number;
}
31
32/**
33 * In-memory storage tier using TinyLRU for LRU eviction.
34 *
35 * @remarks
36 * - Uses the battle-tested TinyLRU library for efficient LRU caching
37 * - Automatically evicts least-recently-used entries when limits are reached
38 * - Not distributed - single process only
39 * - Data is lost on process restart (use warm/cold tiers for persistence)
40 * - Implements both size-based and count-based eviction
41 *
42 * @example
43 * ```typescript
44 * const tier = new MemoryStorageTier({
45 * maxSizeBytes: 100 * 1024 * 1024, // 100MB
46 * maxItems: 1000,
47 * });
48 *
49 * await tier.set('key', data, metadata);
50 * const retrieved = await tier.get('key');
51 * ```
52 */
53export class MemoryStorageTier implements StorageTier {
54 private cache: ReturnType<typeof lru<CacheEntry>>;
55 private currentSize = 0;
56 private stats = {
57 hits: 0,
58 misses: 0,
59 evictions: 0,
60 };
61
62 constructor(private config: MemoryStorageTierConfig) {
63 if (config.maxSizeBytes <= 0) {
64 throw new Error('maxSizeBytes must be positive');
65 }
66 if (config.maxItems !== undefined && config.maxItems <= 0) {
67 throw new Error('maxItems must be positive');
68 }
69
70 // Initialize TinyLRU with max items (we'll handle size limits separately)
71 const maxItems = config.maxItems ?? 10000; // Default to 10k items if not specified
72 this.cache = lru<CacheEntry>(maxItems);
73 }
74
75 async get(key: string): Promise<Uint8Array | null> {
76 const entry = this.cache.get(key);
77
78 if (!entry) {
79 this.stats.misses++;
80 return null;
81 }
82
83 this.stats.hits++;
84 return entry.data;
85 }
86
87 async set(key: string, data: Uint8Array, metadata: StorageMetadata): Promise<void> {
88 const size = data.byteLength;
89
90 // Check existing entry for size accounting
91 const existing = this.cache.get(key);
92 if (existing) {
93 this.currentSize -= existing.size;
94 }
95
96 // Evict entries until we have space for the new entry
97 await this.evictIfNeeded(size);
98
99 // Add new entry
100 const entry: CacheEntry = { data, metadata, size };
101 this.cache.set(key, entry);
102 this.currentSize += size;
103 }
104
105 async delete(key: string): Promise<void> {
106 const entry = this.cache.get(key);
107 if (entry) {
108 this.cache.delete(key);
109 this.currentSize -= entry.size;
110 }
111 }
112
113 async exists(key: string): Promise<boolean> {
114 return this.cache.has(key);
115 }
116
117 async *listKeys(prefix?: string): AsyncIterableIterator<string> {
118 // TinyLRU doesn't expose keys(), so we need to track them separately
119 // For now, we'll use the cache's internal structure
120 const keys = this.cache.keys();
121 for (const key of keys) {
122 if (!prefix || key.startsWith(prefix)) {
123 yield key;
124 }
125 }
126 }
127
128 async deleteMany(keys: string[]): Promise<void> {
129 for (const key of keys) {
130 await this.delete(key);
131 }
132 }
133
134 async getMetadata(key: string): Promise<StorageMetadata | null> {
135 const entry = this.cache.get(key);
136 return entry ? entry.metadata : null;
137 }
138
139 async setMetadata(key: string, metadata: StorageMetadata): Promise<void> {
140 const entry = this.cache.get(key);
141 if (entry) {
142 // Update metadata in place
143 entry.metadata = metadata;
144 // Re-set to mark as recently used
145 this.cache.set(key, entry);
146 }
147 }
148
149 async getStats(): Promise<TierStats> {
150 return {
151 bytes: this.currentSize,
152 items: this.cache.size,
153 hits: this.stats.hits,
154 misses: this.stats.misses,
155 evictions: this.stats.evictions,
156 };
157 }
158
159 async clear(): Promise<void> {
160 this.cache.clear();
161 this.currentSize = 0;
162 }
163
164 /**
165 * Evict least-recently-used entries until there's space for new data.
166 *
167 * @param incomingSize - Size of data being added
168 *
169 * @remarks
170 * TinyLRU handles count-based eviction automatically.
171 * This method handles size-based eviction by using TinyLRU's built-in evict() method,
172 * which properly removes the LRU item without updating access order.
173 */
174 private async evictIfNeeded(incomingSize: number): Promise<void> {
175 // Keep evicting until we have enough space
176 while (this.currentSize + incomingSize > this.config.maxSizeBytes && this.cache.size > 0) {
177 // Get the LRU key (first in the list) without accessing it
178 const keys = this.cache.keys();
179 if (keys.length === 0) break;
180
181 const lruKey = keys[0];
182 if (!lruKey) break;
183
184 // Access the entry directly from internal items without triggering LRU update
185 // TinyLRU exposes items as a public property for this purpose
186 const entry = (this.cache as any).items[lruKey] as CacheEntry | undefined;
187 if (!entry) break;
188
189 // Use TinyLRU's built-in evict() which properly removes the LRU item
190 this.cache.evict();
191 this.currentSize -= entry.size;
192 this.stats.evictions++;
193 }
194 }
195}