1import { stringifyVariables } from '@urql/core';
2
3import type {
4 Link,
5 EntityField,
6 FieldInfo,
7 StorageAdapter,
8 SerializedEntries,
9 Dependencies,
10 OperationType,
11 DataField,
12 Data,
13} from '../types';
14
15import {
16 serializeKeys,
17 deserializeKeyInfo,
18 fieldInfoOfKey,
19 joinKeys,
20} from './keys';
21
22import { invariant, currentDebugStack } from '../helpers/help';
23
/** Generic string-keyed dictionary used for per-entity field storage */
type Dict<T> = Record<string, T>;
/** Map keyed by entity key */
type KeyMap<T> = Map<string, T>;
/** Map keyed by operation (layer) key */
type OperationMap<T> = Map<number, T>;

/**
 * Storage for one kind of node value (records or links), split into the
 * canonical base data and per-operation optimistic layers.
 */
interface NodeMap<T> {
  /** Per-layer optimistic values; an explicit `undefined` entry shadows the base value (see `setNode`) */
  optimistic: OperationMap<KeyMap<Dict<T | undefined>>>;
  /** Non-optimistic, canonical values */
  base: KeyMap<Dict<T>>;
}
32
export interface InMemoryData {
  /** Flag for whether the data is waiting for hydration */
  hydrating: boolean;
  /** Flag for whether deferred tasks (GC / persistence) have been scheduled yet */
  defer: boolean;
  /** A list of entities that have been flagged for garbage collection since no references to them are left */
  gc: Set<string>;
  /** A list of entity+field keys that will be persisted */
  persist: Set<string>;
  /** The API's "Query" typename which is needed to filter dependencies */
  queryRootKey: string;
  /** Number of references to each entity (except "Query") */
  refCount: KeyMap<number>;
  /** A map of entity fields (key-value entries per entity) */
  records: NodeMap<EntityField>;
  /** A map of entity links which are connections from one entity to another (key-value entries per entity) */
  links: NodeMap<Link>;
  /** A map of typename to a list of entity-keys belonging to said type */
  types: Map<string, Set<string>>;
  /** A set of Query operation keys that are in-flight and deferred/streamed */
  deferredKeys: Set<number>;
  /** A set of Query operation keys that are in-flight and awaiting a result */
  commutativeKeys: Set<number>;
  /** A set of Query operation keys that have been written to */
  dirtyKeys: Set<number>;
  /** The order of optimistic layers */
  optimisticOrder: number[];
  /** This may be a persistence adapter that will receive changes in a batch */
  storage: StorageAdapter | null;
  /** A map of all the types we have encountered that did not map directly to a concrete type */
  abstractToConcreteMap: Map<string, Set<string>>;
}
65
// Run-local state, set up by `initDataState` and torn down by `clearDataState`.
// Objects created during the current run (see `makeData`)
let currentOwnership: null | WeakSet<any> = null;
// Maps an input data object to the copy created for it in the current run
let currentDataMapping: null | WeakMap<any, any> = null;
// The InMemoryData instance the current run operates on
let currentData: null | InMemoryData = null;
// The optimistic layer key targeted by the current run, if any
let currentOptimisticKey: null | number = null;
export let currentOperation: null | OperationType = null;
export let currentDependencies: null | Dependencies = null;
export let currentForeignData = false;
export let currentOptimistic = false;
74
75export function makeData(data: DataField | void, isArray?: false): Data;
76export function makeData(data: DataField | void, isArray: true): DataField[];
77
78/** Creates a new data object unless it's been created in this data run */
79export function makeData(data?: DataField | void, isArray?: boolean) {
80 let newData: Data | Data[] | undefined;
81 if (data) {
82 if (currentOwnership!.has(data)) return data;
83 newData = currentDataMapping!.get(data) as any;
84 }
85
86 if (newData == null) {
87 newData = (isArray ? [] : {}) as any;
88 }
89
90 if (data) {
91 currentDataMapping!.set(data, newData);
92 }
93
94 currentOwnership!.add(newData);
95 return newData;
96}
97
98export const ownsData = (data?: Data): boolean =>
99 !!data && currentOwnership!.has(data);
100
/**
 * Before reading or writing, the global state needs to be initialised.
 *
 * Sets up per-run ownership tracking and the dependency set, and decides
 * which optimistic layer (if any) the run targets via `currentOptimisticKey`.
 * Must be paired with a later `clearDataState()` call.
 *
 * @param operationType - 'read' or 'write'; reads never create layers
 * @param data - the InMemoryData instance this run operates on
 * @param layerKey - the operation key identifying the targeted layer, if any
 * @param isOptimistic - whether this run writes optimistic (layered) data
 * @param isForeignData - mirrored into the exported `currentForeignData` flag
 */
export const initDataState = (
  operationType: OperationType,
  data: InMemoryData,
  layerKey?: number | null,
  isOptimistic?: boolean,
  isForeignData?: boolean
) => {
  // Fresh ownership/mapping state so `makeData` can tell which objects
  // were created during this run
  currentOwnership = new WeakSet();
  currentDataMapping = new WeakMap();
  currentOperation = operationType;
  currentData = data;
  currentDependencies = new Set();
  currentOptimistic = !!isOptimistic;
  currentForeignData = !!isForeignData;
  if (process.env.NODE_ENV !== 'production') {
    currentDebugStack.length = 0;
  }

  if (!layerKey) {
    currentOptimisticKey = null;
  } else if (currentOperation === 'read') {
    // We don't create new layers for read operations and instead simply
    // apply the currently available layer, if any
    currentOptimisticKey = layerKey;
  } else if (
    isOptimistic ||
    data.hydrating ||
    data.optimisticOrder.length > 1
  ) {
    // If this operation isn't optimistic and we see it for the first time,
    // then it must've been optimistic in the past, so we can proactively
    // clear the optimistic data before writing
    if (!isOptimistic && !data.commutativeKeys.has(layerKey)) {
      reserveLayer(data, layerKey);
    } else if (isOptimistic) {
      // Remove a non-commutative entry for this layer from the order so
      // `createLayer` below re-registers it at the highest priority
      if (
        data.optimisticOrder.indexOf(layerKey) !== -1 &&
        !data.commutativeKeys.has(layerKey)
      ) {
        data.optimisticOrder.splice(data.optimisticOrder.indexOf(layerKey), 1);
      }
      // NOTE: This optimally shouldn't happen as it implies that an optimistic
      // write is being performed after a concrete write.
      data.commutativeKeys.delete(layerKey);
    }

    // An optimistic update of a mutation may force an optimistic layer,
    // or this Query update may be applied optimistically since it's part
    // of a commutative chain
    currentOptimisticKey = layerKey;
    createLayer(data, layerKey);
  } else {
    // Otherwise we don't create an optimistic layer and clear the
    // operation's one if it already exists
    // We also do this when only one layer exists to avoid having to squash
    // any layers at the end of writing this layer
    currentOptimisticKey = null;
    deleteLayer(data, layerKey);
  }
};
162
/**
 * Reset the data state after read/write is complete.
 *
 * May squash finished commutative layers into the base data and — outside of
 * the test environment — schedules a deferred GC + persistence pass.
 */
export const clearDataState = () => {
  // NOTE: This is only called to check for the invariant to pass
  if (process.env.NODE_ENV !== 'production') {
    getCurrentDependencies();
  }

  const data = currentData!;
  const layerKey = currentOptimisticKey;
  currentOptimistic = false;
  currentOptimisticKey = null;

  // Determine whether the current operation has been a commutative layer
  if (
    !data.hydrating &&
    layerKey &&
    data.optimisticOrder.indexOf(layerKey) > -1
  ) {
    // Squash all layers in reverse order (low priority upwards) that have
    // been written already
    let i = data.optimisticOrder.length;
    while (
      --i >= 0 &&
      data.dirtyKeys.has(data.optimisticOrder[i]) &&
      data.commutativeKeys.has(data.optimisticOrder[i])
    )
      squashLayer(data.optimisticOrder[i]);
  }

  // Drop all run-local state so stray cache access outside a run fails the
  // `getCurrentDependencies` invariant
  currentOwnership = null;
  currentDataMapping = null;
  currentOperation = null;
  currentData = null;
  currentDependencies = null;
  if (process.env.NODE_ENV !== 'production') {
    currentDebugStack.length = 0;
  }

  if (process.env.NODE_ENV !== 'test') {
    // Schedule deferred tasks if we haven't already, and if either a persist or GC run
    // are likely to be needed
    if (!data.defer && (data.storage || !data.optimisticOrder.length)) {
      data.defer = true;
      // Deferred via setTimeout so the GC/persist pass runs in its own
      // init/clear data-state cycle outside the current call stack
      setTimeout(() => {
        initDataState('read', data, null);
        gc();
        persistData();
        clearDataState();
        data.defer = false;
      });
    }
  }
};
216
217/** Initialises then resets the data state, which may squash this layer if necessary */
218export const noopDataState = (
219 data: InMemoryData,
220 layerKey: number | null,
221 isOptimistic?: boolean
222) => {
223 if (layerKey && !isOptimistic) data.deferredKeys.delete(layerKey);
224 initDataState('write', data, layerKey, isOptimistic);
225 clearDataState();
226};
227
228/** As we're writing, we keep around all the records and links we've read or have written to */
229export const getCurrentDependencies = (): Dependencies => {
230 invariant(
231 currentDependencies !== null,
232 'Invalid Cache call: The cache may only be accessed or mutated during' +
233 'operations like write or query, or as part of its resolvers, updaters, ' +
234 'or optimistic configs.',
235 2
236 );
237
238 return currentDependencies;
239};
240
241const DEFAULT_EMPTY_SET = new Set<string>();
242export const make = (queryRootKey: string): InMemoryData => ({
243 hydrating: false,
244 defer: false,
245 gc: new Set(),
246 types: new Map(),
247 persist: new Set(),
248 queryRootKey,
249 refCount: new Map(),
250 links: {
251 optimistic: new Map(),
252 base: new Map(),
253 },
254 abstractToConcreteMap: new Map(),
255 records: {
256 optimistic: new Map(),
257 base: new Map(),
258 },
259 deferredKeys: new Set(),
260 commutativeKeys: new Set(),
261 dirtyKeys: new Set(),
262 optimisticOrder: [],
263 storage: null,
264});
265
266/** Adds a node value to a NodeMap (taking optimistic values into account */
267const setNode = <T>(
268 map: NodeMap<T>,
269 entityKey: string,
270 fieldKey: string,
271 value: T
272) => {
273 if (process.env.NODE_ENV !== 'production') {
274 invariant(
275 currentOperation !== 'read',
276 'Invalid Cache write: You may not write to the cache during cache reads. ' +
277 ' Accesses to `cache.writeFragment`, `cache.updateQuery`, and `cache.link` may ' +
278 ' not be made inside `resolvers` for instance.',
279 27
280 );
281 }
282
283 // Optimistic values are written to a map in the optimistic dict
284 // All other values are written to the base map
285 const keymap: KeyMap<Dict<T | undefined>> = currentOptimisticKey
286 ? map.optimistic.get(currentOptimisticKey)!
287 : map.base;
288
289 // On the map itself we get or create the entity as a dict
290 let entity = keymap.get(entityKey) as Dict<T | undefined>;
291 if (entity === undefined) {
292 keymap.set(entityKey, (entity = Object.create(null)));
293 }
294
295 // If we're setting undefined we delete the node's entry
296 // On optimistic layers we actually set undefined so it can
297 // override the base value
298 if (value === undefined && !currentOptimisticKey) {
299 delete entity[fieldKey];
300 } else {
301 entity[fieldKey] = value;
302 }
303};
304
/**
 * Gets a node value from a NodeMap (taking optimistic values into account).
 *
 * Scans the optimistic layers in priority order before falling back to the
 * base data; the first layer holding an entry for the field wins, even when
 * that entry is an explicit `undefined` (see `setNode`).
 */
const getNode = <T>(
  map: NodeMap<T>,
  entityKey: string,
  fieldKey: string
): T | undefined => {
  let node: Dict<T | undefined> | undefined;
  // A read may be initialised to skip layers until its own, which is useful for
  // reading back written data. It won't skip over optimistic layers however
  let skip =
    !currentOptimistic &&
    currentOperation === 'read' &&
    currentOptimisticKey &&
    currentData!.commutativeKeys.has(currentOptimisticKey);
  // This first iterates over optimistic layers (in order)
  for (let i = 0, l = currentData!.optimisticOrder.length; i < l; i++) {
    const layerKey = currentData!.optimisticOrder[i];
    const optimistic = map.optimistic.get(layerKey);
    // If we're reading starting from a specific layer, we skip until a match
    skip = skip && layerKey !== currentOptimisticKey;
    // If the node and node value exists it is returned, including undefined
    // (`fieldKey in node` detects explicit-undefined layer entries)
    if (
      optimistic &&
      (!skip || !currentData!.commutativeKeys.has(layerKey)) &&
      (!currentOptimistic ||
        currentOperation === 'write' ||
        currentData!.commutativeKeys.has(layerKey)) &&
      (node = optimistic.get(entityKey)) !== undefined &&
      fieldKey in node
    ) {
      return node[fieldKey];
    }
  }

  // Otherwise we read the non-optimistic base value
  node = map.base.get(entityKey);
  return node !== undefined ? node[fieldKey] : undefined;
};
343
344export function getRefCount(entityKey: string): number {
345 return currentData!.refCount.get(entityKey) || 0;
346}
347
348/** Adjusts the reference count of an entity on a refCount dict by "by" and updates the gc */
349const updateRCForEntity = (entityKey: string, by: number): void => {
350 // Retrieve the reference count and adjust it by "by"
351 const count = getRefCount(entityKey);
352 const newCount = count + by > 0 ? count + by : 0;
353 currentData!.refCount.set(entityKey, newCount);
354 // Add it to the garbage collection batch if it needs to be deleted or remove it
355 // from the batch if it needs to be kept
356 if (!newCount) currentData!.gc.add(entityKey);
357 else if (!count && newCount) currentData!.gc.delete(entityKey);
358};
359
360/** Adjusts the reference counts of all entities of a link on a refCount dict by "by" and updates the gc */
361const updateRCForLink = (link: Link | undefined, by: number): void => {
362 if (Array.isArray(link)) {
363 for (let i = 0, l = link.length; i < l; i++) updateRCForLink(link[i], by);
364 } else if (typeof link === 'string') {
365 updateRCForEntity(link, by);
366 }
367};
368
369/** Writes all parsed FieldInfo objects of a given node dict to a given array if it hasn't been seen */
370const extractNodeFields = <T>(
371 fieldInfos: FieldInfo[],
372 seenFieldKeys: Set<string>,
373 node: Dict<T> | undefined
374): void => {
375 if (node !== undefined) {
376 for (const fieldKey in node) {
377 if (!seenFieldKeys.has(fieldKey)) {
378 // If the node hasn't been seen the serialized fieldKey is turnt back into
379 // a rich FieldInfo object that also contains the field's name and arguments
380 fieldInfos.push(fieldInfoOfKey(fieldKey));
381 seenFieldKeys.add(fieldKey);
382 }
383 }
384 }
385};
386
387/** Writes all parsed FieldInfo objects of all nodes in a NodeMap to a given array */
388const extractNodeMapFields = <T>(
389 fieldInfos: FieldInfo[],
390 seenFieldKeys: Set<string>,
391 entityKey: string,
392 map: NodeMap<T>
393) => {
394 // Extracts FieldInfo for the entity in the base map
395 extractNodeFields(fieldInfos, seenFieldKeys, map.base.get(entityKey));
396
397 // Then extracts FieldInfo for the entity from the optimistic maps
398 for (let i = 0, l = currentData!.optimisticOrder.length; i < l; i++) {
399 const optimistic = map.optimistic.get(currentData!.optimisticOrder[i]);
400 if (optimistic !== undefined) {
401 extractNodeFields(fieldInfos, seenFieldKeys, optimistic.get(entityKey));
402 }
403 }
404};
405
/** Garbage collects all entities that have been marked as having no references */
export const gc = () => {
  // If we're currently awaiting deferred results, abort GC run
  if (currentData!.optimisticOrder.length) return;

  // Iterate over all entities that have been marked for deletion
  // Entities have been marked for deletion in `updateRCForEntity` if
  // their reference count dropped to 0
  for (const entityKey of currentData!.gc.keys()) {
    // Remove the current key from the GC batch
    currentData!.gc.delete(entityKey);

    // Check first whether the entity has any references,
    // if so, we skip it from the GC run
    const rc = getRefCount(entityKey);
    if (rc > 0) continue;

    const record = currentData!.records.base.get(entityKey);
    // Delete the reference count, and delete the entity from the GC batch
    currentData!.refCount.delete(entityKey);
    currentData!.records.base.delete(entityKey);

    // Unregister the entity from its typename's entity set as well
    const typename = (record && record.__typename) as string | undefined;
    if (typename) {
      const type = currentData!.types.get(typename);
      if (type) type.delete(entityKey);
    }

    // Dropping this entity's links releases its references to other entities;
    // entities newly added to the gc set are still visited by this same loop
    const linkNode = currentData!.links.base.get(entityKey);
    if (linkNode) {
      currentData!.links.base.delete(entityKey);
      for (const fieldKey in linkNode) updateRCForLink(linkNode[fieldKey], -1);
    }
  }
};
441
442const updateDependencies = (entityKey: string, fieldKey?: string) => {
443 if (entityKey !== currentData!.queryRootKey) {
444 currentDependencies!.add(entityKey);
445 } else if (fieldKey !== undefined && fieldKey !== '__typename') {
446 currentDependencies!.add(joinKeys(entityKey, fieldKey));
447 }
448};
449
450const updatePersist = (entityKey: string, fieldKey: string) => {
451 if (!currentOptimistic && currentData!.storage) {
452 currentData!.persist.add(serializeKeys(entityKey, fieldKey));
453 }
454};
455
456/** Reads an entity's field (a "record") from data */
457export const readRecord = (
458 entityKey: string,
459 fieldKey: string
460): EntityField => {
461 if (currentOperation === 'read') {
462 updateDependencies(entityKey, fieldKey);
463 }
464 return getNode(currentData!.records, entityKey, fieldKey);
465};
466
467/** Reads an entity's link from data */
468export const readLink = (
469 entityKey: string,
470 fieldKey: string
471): Link | undefined => {
472 if (currentOperation === 'read') {
473 updateDependencies(entityKey, fieldKey);
474 }
475 return getNode(currentData!.links, entityKey, fieldKey);
476};
477
478export const getEntitiesForType = (typename: string): Set<string> =>
479 currentData!.types.get(typename) || DEFAULT_EMPTY_SET;
480
481export const writeType = (typename: string, entityKey: string) => {
482 const existingTypes = currentData!.types.get(typename);
483 if (!existingTypes) {
484 const typeSet = new Set<string>();
485 typeSet.add(entityKey);
486 currentData!.types.set(typename, typeSet);
487 } else {
488 existingTypes.add(entityKey);
489 }
490};
491
492export const getConcreteTypes = (typename: string): Set<string> =>
493 currentData!.abstractToConcreteMap.get(typename) || DEFAULT_EMPTY_SET;
494
495export const isSeenConcreteType = (typename: string): boolean =>
496 currentData!.types.has(typename);
497
498export const writeConcreteType = (
499 abstractType: string,
500 concreteType: string
501) => {
502 const existingTypes = currentData!.abstractToConcreteMap.get(abstractType);
503 if (!existingTypes) {
504 const typeSet = new Set<string>();
505 typeSet.add(concreteType);
506 currentData!.abstractToConcreteMap.set(abstractType, typeSet);
507 } else {
508 existingTypes.add(concreteType);
509 }
510};
511
512/** Writes an entity's field (a "record") to data */
513export const writeRecord = (
514 entityKey: string,
515 fieldKey: string,
516 value?: EntityField
517) => {
518 const existing = getNode(currentData!.records, entityKey, fieldKey);
519 if (!isEqualLinkOrScalar(existing, value)) {
520 updateDependencies(entityKey, fieldKey);
521 updatePersist(entityKey, fieldKey);
522 }
523
524 setNode(currentData!.records, entityKey, fieldKey, value);
525};
526
527export const hasField = (entityKey: string, fieldKey: string): boolean =>
528 readRecord(entityKey, fieldKey) !== undefined ||
529 readLink(entityKey, fieldKey) !== undefined;
530
/** Writes an entity's link to data */
export const writeLink = (
  entityKey: string,
  fieldKey: string,
  link?: Link | undefined
) => {
  // Retrieve the link NodeMap from either an optimistic or the base layer
  const links = currentOptimisticKey
    ? currentData!.links.optimistic.get(currentOptimisticKey)
    : currentData!.links.base;
  // Update the reference count for the link
  // (only base-layer writes adjust reference counts; the previous link's
  // references must be released before the new link's are added)
  if (!currentOptimisticKey) {
    const entityLinks = links && links.get(entityKey);
    updateRCForLink(entityLinks && entityLinks[fieldKey], -1);
    updateRCForLink(link, 1);
  }
  const existing = getNode(currentData!.links, entityKey, fieldKey);
  // Dependencies and persistence are only flagged when the link changed
  if (!isEqualLinkOrScalar(existing, link)) {
    updateDependencies(entityKey, fieldKey);
    updatePersist(entityKey, fieldKey);
  }

  // Update the link
  setNode(currentData!.links, entityKey, fieldKey, link);
};
556
/**
 * Reserves an optimistic layer and preorders it.
 *
 * @param data - the InMemoryData instance to modify
 * @param layerKey - the operation key identifying the layer
 * @param hasNext - whether more (deferred/streamed) results are expected,
 * which demotes the layer below currently pending operations
 */
export const reserveLayer = (
  data: InMemoryData,
  layerKey: number,
  hasNext?: boolean
) => {
  // Find the current index for the layer, and remove it from
  // the order if it exists already
  let index = data.optimisticOrder.indexOf(layerKey);
  if (index > -1) data.optimisticOrder.splice(index, 1);

  if (hasNext) {
    data.deferredKeys.add(layerKey);
    // If the layer has future results then we'll move it past any layer that's
    // still empty, so currently pending operations will take precedence over it
    // (the scan stops at the first deferred layer, or the first layer that is
    // both dirty and commutative)
    for (
      index = index > -1 ? index : 0;
      index < data.optimisticOrder.length &&
      !data.deferredKeys.has(data.optimisticOrder[index]) &&
      (!data.dirtyKeys.has(data.optimisticOrder[index]) ||
        !data.commutativeKeys.has(data.optimisticOrder[index]));
      index++
    );
  } else {
    data.deferredKeys.delete(layerKey);
    // Protect optimistic layers from being turned into non-optimistic layers
    // while preserving optimistic data
    if (index > -1 && !data.commutativeKeys.has(layerKey))
      clearLayer(data, layerKey);
    index = 0;
  }

  // Register the layer with the deferred or "top" index and
  // mark it as commutative
  data.optimisticOrder.splice(index, 0, layerKey);
  data.commutativeKeys.add(layerKey);
};
594
595/** Checks whether a given layer exists */
596export const hasLayer = (data: InMemoryData, layerKey: number) =>
597 data.commutativeKeys.has(layerKey) ||
598 data.optimisticOrder.indexOf(layerKey) > -1;
599
600/** Creates an optimistic layer of links and records */
601const createLayer = (data: InMemoryData, layerKey: number) => {
602 if (data.optimisticOrder.indexOf(layerKey) === -1) {
603 data.optimisticOrder.unshift(layerKey);
604 }
605
606 if (!data.dirtyKeys.has(layerKey)) {
607 data.dirtyKeys.add(layerKey);
608 data.links.optimistic.set(layerKey, new Map());
609 data.records.optimistic.set(layerKey, new Map());
610 }
611};
612
613/** Clears all links and records of an optimistic layer */
614const clearLayer = (data: InMemoryData, layerKey: number) => {
615 if (data.dirtyKeys.has(layerKey)) {
616 data.dirtyKeys.delete(layerKey);
617 data.records.optimistic.delete(layerKey);
618 data.links.optimistic.delete(layerKey);
619 data.deferredKeys.delete(layerKey);
620 }
621};
622
623/** Deletes links and records of an optimistic layer, and the layer itself */
624const deleteLayer = (data: InMemoryData, layerKey: number) => {
625 const index = data.optimisticOrder.indexOf(layerKey);
626 if (index > -1) {
627 data.optimisticOrder.splice(index, 1);
628 data.commutativeKeys.delete(layerKey);
629 }
630
631 clearLayer(data, layerKey);
632};
633
/**
 * Merges an optimistic layer of links and records into the base data.
 * Writes go through `writeLink`/`writeRecord` so reference counts,
 * dependencies, and persistence flags are updated along the way.
 */
const squashLayer = (layerKey: number) => {
  // Hide current dependencies from squashing operations
  const previousDependencies = currentDependencies;
  currentDependencies = new Set();
  // NOTE: forces write mode for the duration; `currentOperation` is not
  // restored afterwards
  currentOperation = 'write';

  const links = currentData!.links.optimistic.get(layerKey);
  if (links) {
    for (const entry of links.entries()) {
      const entityKey = entry[0];
      const keyMap = entry[1];
      for (const fieldKey in keyMap) {
        writeLink(entityKey, fieldKey, keyMap[fieldKey]);
      }
    }
  }

  const records = currentData!.records.optimistic.get(layerKey);
  if (records) {
    for (const entry of records.entries()) {
      const entityKey = entry[0];
      const keyMap = entry[1];
      for (const fieldKey in keyMap) {
        writeRecord(entityKey, fieldKey, keyMap[fieldKey]);
      }
    }
  }

  // Restore the hidden dependencies and drop the now-merged layer
  currentDependencies = previousDependencies;
  deleteLayer(currentData!, layerKey);
};
666
667/** Return an array of FieldInfo (info on all the fields and their arguments) for a given entity */
668export const inspectFields = (entityKey: string): FieldInfo[] => {
669 const { links, records } = currentData!;
670 const fieldInfos: FieldInfo[] = [];
671 const seenFieldKeys: Set<string> = new Set();
672 // Update dependencies
673 updateDependencies(entityKey);
674 // Extract FieldInfos to the fieldInfos array for links and records
675 // This also deduplicates by keeping track of fieldKeys in the seenFieldKeys Set
676 extractNodeMapFields(fieldInfos, seenFieldKeys, entityKey, links);
677 extractNodeMapFields(fieldInfos, seenFieldKeys, entityKey, records);
678 return fieldInfos;
679};
680
/**
 * Flushes all batched entity+field changes to the configured storage adapter.
 * Link values are serialized with a ':' prefix to distinguish them from
 * records; an `undefined` entry is passed through for deleted values.
 */
export const persistData = () => {
  if (currentData!.storage) {
    // Reads below go through readLink/readRecord; `currentOptimistic` is
    // toggled for the duration, which changes the layer-visibility checks
    // applied in `getNode`
    currentOptimistic = true;
    currentOperation = 'read';
    const entries: SerializedEntries = {};
    for (const key of currentData!.persist.keys()) {
      const { entityKey, fieldKey } = deserializeKeyInfo(key);
      let x: void | Link | EntityField;
      if ((x = readLink(entityKey, fieldKey)) !== undefined) {
        // ':' prefix marks serialized links (see hydrateData)
        entries[key] = `:${stringifyVariables(x)}`;
      } else if ((x = readRecord(entityKey, fieldKey)) !== undefined) {
        entries[key] = stringifyVariables(x);
      } else {
        // Neither a link nor a record exists anymore: signal a deletion
        entries[key] = undefined;
      }
    }

    currentOptimistic = false;
    currentData!.storage.writeData(entries);
    currentData!.persist.clear();
  }
};
703
/**
 * Restores persisted entries into the cache's base data.
 * Values already present in memory win over persisted ones; values prefixed
 * with ':' are parsed as links, all others as records (see `persistData`).
 */
export const hydrateData = (
  data: InMemoryData,
  storage: StorageAdapter,
  entries: SerializedEntries
) => {
  initDataState('write', data, null);

  for (const key in entries) {
    const value = entries[key];
    if (value !== undefined) {
      const { entityKey, fieldKey } = deserializeKeyInfo(key);
      if (value[0] === ':') {
        // ':'-prefixed values were serialized from links
        if (readLink(entityKey, fieldKey) === undefined)
          writeLink(entityKey, fieldKey, JSON.parse(value.slice(1)));
      } else {
        if (readRecord(entityKey, fieldKey) === undefined)
          writeRecord(entityKey, fieldKey, JSON.parse(value));
      }
    }
  }

  // Attach storage and end hydration before resetting state, so the
  // `clearDataState()` call below behaves like a regular write's cleanup
  data.storage = storage;
  data.hydrating = false;
  clearDataState();
};
729
730function isEqualLinkOrScalar(
731 a: Link | EntityField | undefined,
732 b: Link | EntityField | undefined
733) {
734 if (typeof a !== typeof b) return false;
735 if (a !== b) return false;
736 if (Array.isArray(a) && Array.isArray(b)) {
737 if (a.length !== b.length) return false;
738 return !a.some((el, index) => el !== b[index]);
739 }
740
741 return true;
742}