diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts index e35469d9960..011df4b52d2 100644 --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -597,6 +597,127 @@ describe('Cache', () => { ).toEqual({ a: 1, b: 2, c: 3 }); }, ); + + it("should not accidentally depend on unrelated entity fields", () => { + const cache = new InMemoryCache({ + resultCaching: true, + }); + + const firstNameFragment = gql` + fragment FirstNameFragment on Person { + firstName + } + `; + + const lastNameFragment = gql` + fragment LastNameFragment on Person { + lastName + } + `; + + const bothNamesData = { + __typename: "Person", + id: 123, + firstName: "Ben", + lastName: "Newman", + }; + + const id = cache.identify(bothNamesData); + + cache.writeFragment({ + id, + fragment: firstNameFragment, + data: bothNamesData, + }); + + expect(cache.extract()).toEqual({ + "Person:123": { + __typename: "Person", + firstName: "Ben", + }, + }); + + const firstNameResult = cache.readFragment({ + id, + fragment: firstNameFragment, + }); + + expect(firstNameResult).toEqual({ + __typename: "Person", + firstName: "Ben", + }); + + cache.writeFragment({ + id, + fragment: lastNameFragment, + data: bothNamesData, + }); + + expect(cache.extract()).toEqual({ + "Person:123": { + __typename: "Person", + firstName: "Ben", + lastName: "Newman", + }, + }); + + // This is the crucial test: modifying the lastName field should not + // invalidate results that did not depend on the lastName field. 
+ expect(cache.readFragment({ + id, + fragment: firstNameFragment, + })).toBe(firstNameResult); + + const lastNameResult = cache.readFragment({ + id, + fragment: lastNameFragment, + }); + + expect(lastNameResult).toEqual({ + __typename: "Person", + lastName: "Newman", + }); + + cache.writeFragment({ + id, + fragment: firstNameFragment, + data: { + ...bothNamesData, + firstName: "Benjamin", + }, + }); + + expect(cache.extract()).toEqual({ + "Person:123": { + __typename: "Person", + firstName: "Benjamin", + lastName: "Newman", + }, + }); + + const benjaminResult = cache.readFragment({ + id, + fragment: firstNameFragment, + }); + + expect(benjaminResult).toEqual({ + __typename: "Person", + firstName: "Benjamin", + }); + + // Still the same as it was? + expect(firstNameResult).toEqual({ + __typename: "Person", + firstName: "Ben", + }); + + // Updating the firstName should not have invalidated the + // previously-read lastNameResult. + expect(cache.readFragment({ + id, + fragment: lastNameFragment, + })).toBe(lastNameResult); + }); }); describe('writeQuery', () => { diff --git a/src/cache/inmemory/__tests__/diffAgainstStore.ts b/src/cache/inmemory/__tests__/diffAgainstStore.ts index 15e93e7ac47..f8796eb0109 100644 --- a/src/cache/inmemory/__tests__/diffAgainstStore.ts +++ b/src/cache/inmemory/__tests__/diffAgainstStore.ts @@ -179,7 +179,7 @@ describe('diffing queries against the store', () => { }); expect(complete).toBeTruthy(); - expect(store.get('Person:{"id":"1"}')).toEqual({ + expect((store as any).lookup('Person:{"id":"1"}')).toEqual({ __typename: 'Person', id: '1', name: 'Luke Skywalker', diff --git a/src/cache/inmemory/__tests__/recordingCache.ts b/src/cache/inmemory/__tests__/recordingCache.ts index cf75417078d..ea9ac2186ea 100644 --- a/src/cache/inmemory/__tests__/recordingCache.ts +++ b/src/cache/inmemory/__tests__/recordingCache.ts @@ -1,4 +1,4 @@ -import { NormalizedCacheObject } from '../types'; +import { NormalizedCacheObject, StoreObject } from 
'../types'; import { EntityStore } from '../entityStore'; describe('Optimistic EntityStore layering', () => { @@ -6,6 +6,10 @@ describe('Optimistic EntityStore layering', () => { return root.addLayer('whatever', () => {}); } + function lookup(store: EntityStore, dataId: string) { + return (store as any).lookup(dataId) as StoreObject; + } + describe('returns correct values during recording', () => { const data = { Human: { __typename: 'Human', name: 'Mark' }, @@ -24,23 +28,23 @@ describe('Optimistic EntityStore layering', () => { }); it('should passthrough values if not defined in recording', () => { - expect(store.get('Human')).toBe(data.Human); - expect(store.get('Animal')).toBe(data.Animal); + expect(lookup(store, 'Human')).toBe(data.Human); + expect(lookup(store, 'Animal')).toBe(data.Animal); }); it('should return values defined during recording', () => { store.merge('Human', dataToRecord.Human); - expect(store.get('Human')).toEqual(dataToRecord.Human); - expect(underlyingStore.get('Human')).toBe(data.Human); + expect(lookup(store, 'Human')).toEqual(dataToRecord.Human); + expect(lookup(underlyingStore, 'Human')).toBe(data.Human); }); it('should return undefined for values deleted during recording', () => { - expect(store.get('Animal')).toBe(data.Animal); + expect(lookup(store, 'Animal')).toBe(data.Animal); // delete should be registered in the recording: store.delete('Animal'); - expect(store.get('Animal')).toBeUndefined(); + expect(lookup(store, 'Animal')).toBeUndefined(); expect(store.toObject()).toHaveProperty('Animal'); - expect(underlyingStore.get('Animal')).toBe(data.Animal); + expect(lookup(underlyingStore, 'Animal')).toBe(data.Animal); }); }); diff --git a/src/cache/inmemory/__tests__/writeToStore.ts b/src/cache/inmemory/__tests__/writeToStore.ts index 8b0cb021ef6..483357c4c41 100644 --- a/src/cache/inmemory/__tests__/writeToStore.ts +++ b/src/cache/inmemory/__tests__/writeToStore.ts @@ -1507,7 +1507,7 @@ describe('writing to the store', () => { }); 
Object.keys(store.toObject()).forEach(field => { - expect(store.get(field)).toEqual(newStore.get(field)); + expect((store as any).lookup(field)).toEqual((newStore as any).lookup(field)); }); }); @@ -1543,7 +1543,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('1')).toEqual(result.todos[0]); + expect((newStore as any).lookup('1')).toEqual(result.todos[0]); }); it('should warn when it receives the wrong data with non-union fragments', () => { @@ -1569,7 +1569,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('1')).toEqual(result.todos[0]); + expect((newStore as any).lookup('1')).toEqual(result.todos[0]); }, /Missing field description/); }); @@ -1623,7 +1623,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('1')).toEqual(result.todos[0]); + expect((newStore as any).lookup('1')).toEqual(result.todos[0]); }, /Missing field price/); }); @@ -1651,7 +1651,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('1')).toEqual(result.todos[0]); + expect((newStore as any).lookup('1')).toEqual(result.todos[0]); }, /Missing field __typename/); }); @@ -1671,7 +1671,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('ROOT_QUERY')).toEqual({ + expect((newStore as any).lookup('ROOT_QUERY')).toEqual({ __typename: 'Query', todos: null, }); @@ -1700,7 +1700,7 @@ describe('writing to the store', () => { result, }); - expect(newStore.get('ROOT_QUERY')).toEqual({ __typename: 'Query', id: 1 }); + expect((newStore as any).lookup('ROOT_QUERY')).toEqual({ __typename: 'Query', id: 1 }); expect(console.warn).not.toBeCalled(); console.warn = originalWarn; }); diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts index b43f899e52a..1c88766d674 100644 --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -36,17 +36,14 @@ export abstract class EntityStore implements NormalizedCache { return { 
...this.data }; } - public has(dataId: string, fieldName?: string): boolean { - return this.get(dataId, fieldName) !== void 0; + public has(dataId: string): boolean { + return this.lookup(dataId, true) !== void 0; } - public get(dataId: string): StoreObject; - public get(dataId: string, fieldName: string): StoreValue; - public get(dataId: string, fieldName?: string): StoreValue { + public get(dataId: string, fieldName: string): StoreValue { this.group.depend(dataId, fieldName); if (hasOwn.call(this.data, dataId)) { const storeObject = this.data[dataId]; - if (!fieldName) return storeObject; if (storeObject && hasOwn.call(storeObject, fieldName)) { return storeObject[fieldName]; } @@ -56,17 +53,28 @@ export abstract class EntityStore implements NormalizedCache { } } + private lookup(dataId: string, dependOnExistence?: boolean): StoreObject { + // The has method (above) calls lookup with dependOnExistence = true, so + // that it can later be invalidated when we add or remove a StoreObject for + // this dataId. Any consumer who cares about the contents of the StoreObject + // should not rely on this dependency, since the contents could change + // without the object being added or removed. + if (dependOnExistence) this.group.depend(dataId, "__exists"); + return hasOwn.call(this.data, dataId) ? this.data[dataId] : + this instanceof Layer ? this.parent.lookup(dataId, dependOnExistence) : void 0; + } + public merge(dataId: string, incoming: StoreObject): void { - const existing = this.get(dataId); - const merged = new DeepMerger(storeObjectReconciler) - .merge(existing, incoming, this); + const existing = this.lookup(dataId); + const merged = new DeepMerger(storeObjectReconciler).merge(existing, incoming, this); if (merged !== existing) { this.data[dataId] = merged; delete this.refs[dataId]; if (this.group.caching) { - // First, invalidate any dependents that called store.get(id) - // rather than store.get(id, fieldName). 
- this.group.dirty(dataId); + // If we added a new StoreObject where there was previously none, dirty + // anything that depended on the existence of this dataId, such as the + // EntityStore#has method. + if (!existing) this.group.dirty(dataId, "__exists"); // Now invalidate dependents who called getFieldValue for any // fields that are changing as a result of this merge. Object.keys(incoming).forEach(storeFieldName => { @@ -83,24 +91,20 @@ export abstract class EntityStore implements NormalizedCache { // fields of that entity whose names match fieldName, according to the // fieldNameFromStoreName helper function. public delete(dataId: string, fieldName?: string) { - const storeObject = this.get(dataId); + const storeObject = this.lookup(dataId); if (storeObject) { // In case someone passes in a storeFieldName (field.name.value + // arguments key), normalize it down to just the field name. fieldName = fieldName && fieldNameFromStoreName(fieldName); - const storeNamesToDelete: string[] = []; - Object.keys(storeObject).forEach(storeFieldName => { + const storeNamesToDelete = Object.keys(storeObject).filter( // If the field value has already been set to undefined, we do not // need to delete it again. - if (storeObject[storeFieldName] !== void 0 && - // If no fieldName provided, delete all fields from storeObject. - // If provided, delete all fields matching fieldName. - (!fieldName || fieldName === fieldNameFromStoreName(storeFieldName))) { - storeNamesToDelete.push(storeFieldName); - } - }); + storeFieldName => storeObject[storeFieldName] !== void 0 && + // If no fieldName provided, delete all fields from storeObject. + // If provided, delete all fields matching fieldName. 
+ (!fieldName || fieldName === fieldNameFromStoreName(storeFieldName))); if (storeNamesToDelete.length) { // If we only have to worry about the Root layer of the store, @@ -124,7 +128,7 @@ export abstract class EntityStore implements NormalizedCache { // delete this.rootIds[dataId]; delete this.refs[dataId]; - const fieldsToDirty: Record<string, boolean> = Object.create(null); + const fieldsToDirty = new Set<string>(); if (fieldName) { // If we have a fieldName and it matches more than zero fields, @@ -137,20 +141,21 @@ // Although it would be logically correct to dirty each // storeFieldName in the loop above, we know that they all have // the same name, according to fieldNameFromStoreName. - fieldsToDirty[fieldName] = true; + fieldsToDirty.add(fieldName); } else { // If no fieldName was provided, then we delete the whole entity // from the cache. remove(this.data, dataId); storeNamesToDelete.forEach(storeFieldName => { - const fieldName = fieldNameFromStoreName(storeFieldName); - fieldsToDirty[fieldName] = true; + fieldsToDirty.add(fieldNameFromStoreName(storeFieldName)); }); + // Let dependents (such as EntityStore#has) know that this dataId has + // been removed from this layer of the store. + fieldsToDirty.add("__exists"); } if (this.group.caching) { - this.group.dirty(dataId); - Object.keys(fieldsToDirty).forEach(fieldName => { + fieldsToDirty.forEach(fieldName => { this.group.dirty(dataId, fieldName); }); } @@ -297,13 +302,13 @@ class CacheGroup { this.d = caching ?
dep<string>() : null; } - public depend(dataId: string, storeFieldName?: string) { + public depend(dataId: string, storeFieldName: string) { if (this.d) { this.d(makeDepKey(dataId, storeFieldName)); } } - public dirty(dataId: string, storeFieldName?: string) { + public dirty(dataId: string, storeFieldName: string) { if (this.d) { this.d.dirty(makeDepKey(dataId, storeFieldName)); } } @@ -314,12 +319,8 @@ public readonly keyMaker = new KeyTrie<object>(canUseWeakMap); } -function makeDepKey(dataId: string, storeFieldName?: string) { - const parts = [dataId]; - if (typeof storeFieldName === "string") { - parts.push(fieldNameFromStoreName(storeFieldName)); - } - return JSON.stringify(parts); +function makeDepKey(dataId: string, storeFieldName: string) { + return JSON.stringify([dataId, fieldNameFromStoreName(storeFieldName)]); } export namespace EntityStore { @@ -365,7 +366,7 @@ class Layer extends EntityStore { constructor( public readonly id: string, - public readonly parent: Layer | EntityStore.Root, + public readonly parent: EntityStore, public readonly replay: (layer: EntityStore) => any, public readonly group: CacheGroup, ) { diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts index 5639205cb23..37ee24a2256 100644 --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -19,8 +19,7 @@ export declare type IdGetter = ( * StoreObjects from the cache */ export interface NormalizedCache { - has(dataId: string, fieldName?: string): boolean; - get(dataId: string): StoreObject; + has(dataId: string): boolean; get(dataId: string, fieldName: string): StoreValue; merge(dataId: string, incoming: StoreObject): void; delete(dataId: string, fieldName?: string): boolean;