From e4e97174ddabae2e689cea696a4eca15b8c6218a Mon Sep 17 00:00:00 2001 From: Bradley Maier Date: Fri, 3 Feb 2017 00:17:41 -0500 Subject: [PATCH] Finish stores conversion to TS 2.2 mixins --- package.json | 1 - src/interfaces.d.ts | 105 +++ src/patch/Patch.ts | 2 - src/query/CompoundQuery.ts | 2 +- src/query/createFilter.ts | 2 +- src/query/createSort.ts | 2 +- src/query/createStoreRange.ts | 2 +- src/query/interfaces.d.ts | 13 - src/storage/InMemoryStorage.ts | 27 +- src/storage/IndexedDBStorage.ts | 6 +- src/store/ObservableStore.ts | 686 ++++++++++++++ src/store/QueryResult.ts | 857 +++++++++++++++++ src/store/QueryableStore.ts | 149 +++ src/store/StoreBase.ts | 167 ++++ src/store/createQueryTransformResult.ts | 889 ------------------ src/store/createStore.ts | 238 ----- src/store/createStoreObservable.ts | 10 +- src/store/materialize.ts | 12 +- .../mixins/createObservableStoreMixin.ts | 757 --------------- src/store/mixins/createQueryTransformMixin.ts | 177 ---- src/store/mixins/createTransactionMixin.ts | 108 --- tests/unit/all.ts | 11 +- tests/unit/query/CompoundQuery.ts | 2 +- tests/unit/storage/InMemoryStorage.ts | 4 +- tests/unit/storage/IndexedDBStorage.ts | 7 +- ...rvableStoreMixin.ts => ObservableStore.ts} | 40 +- .../querying.ts | 86 +- .../tracking.ts | 45 +- .../transforming.ts | 12 +- .../store/{createStore.ts => StoreBase.ts} | 85 +- tests/unit/store/materialize.ts | 45 +- .../store/mixins/createTransactionMixin.ts | 144 --- tests/unit/support/AsyncStorage.ts | 121 ++- 33 files changed, 2211 insertions(+), 2603 deletions(-) create mode 100644 src/interfaces.d.ts delete mode 100644 src/query/interfaces.d.ts create mode 100644 src/store/ObservableStore.ts create mode 100644 src/store/QueryResult.ts create mode 100644 src/store/QueryableStore.ts create mode 100644 src/store/StoreBase.ts delete mode 100644 src/store/createQueryTransformResult.ts delete mode 100644 src/store/createStore.ts delete mode 100644 src/store/mixins/createObservableStoreMixin.ts delete mode 100644 src/store/mixins/createQueryTransformMixin.ts delete mode 100644 src/store/mixins/createTransactionMixin.ts rename tests/unit/store/{mixins/createObservableStoreMixin.ts => ObservableStore.ts} (94%) rename tests/unit/store/{mixins/createQueryTransformMixin => QueryableStore}/querying.ts (90%) rename tests/unit/store/{mixins/createQueryTransformMixin => QueryableStore}/tracking.ts (90%) rename tests/unit/store/{mixins/createQueryTransformMixin => QueryableStore}/transforming.ts (98%) rename tests/unit/store/{createStore.ts => StoreBase.ts} (91%) delete mode 100644 tests/unit/store/mixins/createTransactionMixin.ts diff --git a/package.json b/package.json index 992e251..455fd33 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,6 @@ "test": "grunt test" }, "peerDependencies": { - "@dojo/compose": "2.0.0-beta.24", "@dojo/core": "2.0.0-alpha.25", "@dojo/has": "2.0.0-alpha.8", "@dojo/shim": "2.0.0-beta.10" diff --git a/src/interfaces.d.ts b/src/interfaces.d.ts new file mode 100644 index 0000000..c36a273 --- /dev/null +++ b/src/interfaces.d.ts @@ -0,0 +1,105 @@ +import Patch from './patch/Patch'; +import { Subscribable } from '@dojo/core/Observable'; +import Map from '@dojo/shim/Map'; +import Promise from '@dojo/shim/Promise'; + +export interface Query { + apply(data: T[]): T[]; + toString(querySerializer?: (query: Query) => string): string; + incremental?: boolean; + queryType?: QueryType; +} + +export type Constructor = new (...args: any[]) => T; + +export const enum QueryType { + Filter, + Sort, + Range, + 
Compound +} + +export interface CrudOptions { + rejectOverwrite?: boolean; + id?: string; +} + +export interface UpdateResults { + currentItems?: T[]; + failedData?: CrudArgument[]; + successfulData: T[] | string[]; + type: StoreOperation; +} + +/** + * Adds a then method to the observable for those consumers of the store API who + * only want to know about the end result of an operation, and don't want to deal with + * any recoverable failures. + */ +export type StoreObservable = Subscribable & Promise + +export interface Storage { + identify(items: T[]|T): string[]; + createId(): Promise; + fetch(query?: Query): FetchResult; + get(ids: string[]): Promise; + put(items: T[], options?: O): Promise>; + add(items: T[], options?: O): Promise>; + delete(ids: string[]): Promise>; + patch(updates: { id: string; patch: Patch }[], options?: O): Promise>; +} + +export interface Store> { + get(ids: string[]): Promise; + get(id: string): Promise; + get(ids: string | string[]): Promise; + identify(items: T[]): string[]; + identify(items: T): string; + identify(items: T | T[]): string | string[]; + createId(): Promise; + add(items: T[] | T, options?: O): StoreObservable; + put(items: T[] | T, options?: O): StoreObservable; + patch(updates: PatchArgument, options?: O): StoreObservable; + delete(ids: string[] | string): StoreObservable; + fetch(query?: Query): FetchResult; +} + +export const enum StoreOperation { + Add, + Put, + Patch, + Delete +} + +export interface StoreOptions { + data?: T[]; + idProperty?: keyof T; + idFunction?: (item: T) => string; + storage?: Storage; +} + +export type CrudArgument = T | string | PatchMapEntry; + +export type BasicPatch = { id: string } & { + [P in keyof T]?: T[P] | BasicPatch; + }; + +export type PatchArgument = Map> | + { id: string; patch: Patch } | + { id: string; patch: Patch }[] | + BasicPatch | + BasicPatch[]; + +export interface FetchResult extends Promise { + /** + * A Promise that resolves to the total number of items in the underlying storage. + */ + totalLength: Promise; + /** + * For a store, this is identical to totalLength. 
For a QueryTransformResultInterface, this resolves to the number of items + * that match the QueryTransformResultInterface's queries + */ + dataLength: Promise; +} + +export type PatchMapEntry = { id: string; patch: Patch }; diff --git a/src/patch/Patch.ts b/src/patch/Patch.ts index f99add6..bcdd2d2 100644 --- a/src/patch/Patch.ts +++ b/src/patch/Patch.ts @@ -2,8 +2,6 @@ import { shouldRecurseInto, isEqual } from '../utils'; import createOperation, { Operation, OperationType } from './createOperation'; import JsonPointer from './JsonPointer'; -export type PatchMapEntry = { id: string; patch: Patch }; - function _diff(to: any, from: any, startingPath?: JsonPointer): Operation[] { if (!shouldRecurseInto(from) || !shouldRecurseInto(to)) { return []; diff --git a/src/query/CompoundQuery.ts b/src/query/CompoundQuery.ts index 130627c..cc4f392 100644 --- a/src/query/CompoundQuery.ts +++ b/src/query/CompoundQuery.ts @@ -1,4 +1,4 @@ -import { Query, QueryType } from './interfaces'; +import { Query, QueryType } from '../interfaces'; export interface QueryOptions { query?: Query; diff --git a/src/query/createFilter.ts b/src/query/createFilter.ts index 56c2d83..335022e 100644 --- a/src/query/createFilter.ts +++ b/src/query/createFilter.ts @@ -1,6 +1,6 @@ import JsonPointer, { navigate } from '../patch/JsonPointer'; import { isEqual } from '../utils'; -import { Query, QueryType } from './interfaces'; +import { Query, QueryType } from '../interfaces'; export type FilterFunction = (data: T[]) => T[]; export type ObjectPointer = JsonPointer | keyof T | ''; diff --git a/src/query/createSort.ts b/src/query/createSort.ts index 99f9a92..4d5eb77 100644 --- a/src/query/createSort.ts +++ b/src/query/createSort.ts @@ -1,4 +1,4 @@ -import { Query, QueryType } from './interfaces'; +import { Query, QueryType } from '../interfaces'; import JsonPointer, { navigate } from '../patch/JsonPointer'; export type SortParameter = ((a: T, b: T) => number) | keyof T | JsonPointer; diff --git a/src/query/createStoreRange.ts b/src/query/createStoreRange.ts index b7c120d..35e82c9 100644 --- a/src/query/createStoreRange.ts +++ b/src/query/createStoreRange.ts @@ -1,4 +1,4 @@ -import { Query, QueryType } from './interfaces'; +import { Query, QueryType } from '../interfaces'; export interface StoreRange extends Query { readonly start: number; readonly count: number; diff --git a/src/query/interfaces.d.ts b/src/query/interfaces.d.ts deleted file mode 100644 index 4d57c89..0000000 --- a/src/query/interfaces.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -export interface Query { - apply(data: T[]): T[]; - toString(querySerializer?: (query: Query) => string): string; - incremental?: boolean; - queryType: QueryType; -} - -export const enum QueryType { - Filter, - Sort, - Range, - Compound -} diff --git a/src/storage/InMemoryStorage.ts b/src/storage/InMemoryStorage.ts index 9a15f33..3867dab 100644 --- a/src/storage/InMemoryStorage.ts +++ b/src/storage/InMemoryStorage.ts @@ -1,34 +1,11 @@ -import { Query } from '../query/interfaces'; -import { StoreOperation, CrudOptions, StoreOptions, UpdateResults } from '../store/createStore'; +import { Query, FetchResult } from '../interfaces'; +import { StoreOperation, CrudOptions, StoreOptions, UpdateResults, Storage } from '../interfaces'; import Promise from '@dojo/shim/Promise'; import Map from '@dojo/shim/Map'; import Patch from '../patch/Patch'; import { duplicate } from '@dojo/core/lang'; import uuid from '@dojo/core/uuid'; -export interface FetchResult extends Promise { - /** - * A Promise that resolves 
to the total number of items in the underlying storage.
-	 */
-	totalLength: Promise;
-	/**
-	 * For a store, this is identical to totalLength. For a QueryTransformResult, this resolves to the number of items
-	 * that match the QueryTransformResult's queries
-	 */
-	dataLength: Promise;
-}
-
-export interface Storage {
-	identify(items: T[]|T): string[];
-	createId(): Promise;
-	fetch(query?: Query): FetchResult;
-	get(ids: string[]): Promise;
-	put(items: T[], options?: O): Promise>;
-	add(items: T[], options?: O): Promise>;
-	delete(ids: string[]): Promise>;
-	patch(updates: { id: string; patch: Patch }[], options?: O): Promise>;
-}
-
 export default class InMemoryStorage implements Storage {
 	private idProperty?: keyof T;
 	private idFunction?: (item: T) => string;
diff --git a/src/storage/IndexedDBStorage.ts b/src/storage/IndexedDBStorage.ts
index 2355cab..6203deb 100644
--- a/src/storage/IndexedDBStorage.ts
+++ b/src/storage/IndexedDBStorage.ts
@@ -1,6 +1,6 @@
-import InMemoryStorage, { FetchResult } from './InMemoryStorage';
-import { StoreOptions, CrudOptions, StoreOperation, UpdateResults } from '../store/createStore';
-import {Query, QueryType} from '../query/interfaces';
+import InMemoryStorage from './InMemoryStorage';
+import { StoreOptions, CrudOptions, StoreOperation, UpdateResults, FetchResult } from '../interfaces';
+import {Query, QueryType} from '../interfaces';
 import Set from '@dojo/shim/Set';
 import Promise from '@dojo/shim/Promise';
 import Patch from '../patch/Patch';
diff --git a/src/store/ObservableStore.ts b/src/store/ObservableStore.ts
new file mode 100644
index 0000000..3df702c
--- /dev/null
+++ b/src/store/ObservableStore.ts
@@ -0,0 +1,686 @@
+import StoreBase from './StoreBase';
+import { Observable, Observer } from '@dojo/core/Observable';
+import Map from '@dojo/shim/Map';
+import Set from '@dojo/shim/Set';
+import Promise from '@dojo/shim/Promise';
+import { CrudOptions, Store, StoreOptions, UpdateResults, Query, PatchArgument } from '../interfaces';
+import { debounce } from '@dojo/core/util';
+import { after } from '@dojo/core/aspect';
+
+export interface StoreDelta {
+	/**
+	 * Items updated since the last delta
+	 */
+	updates: T[];
+	/**
+	 * The IDs of any deleted items
+	 */
+	deletes: string[];
+	/**
+	 * New items added since the last delta
+	 */
+	adds: T[];
+	/**
+	 * The state of the store before any of these updates.
+	 */
+	beforeAll: T[];
+	/**
+	 * The state of the store after all of these updates. Doesn't necessarily
+	 * reflect the current state of the underlying Storage, as it updates the local
+	 * storage based on the known updates if fetchAroundUpdates is false
+	 */
+	afterAll: T[];
+}
+
+/**
+ * Combines several sequential deltas into a single delta.
+ * It performs several checks to remove redundant data.
+ * - Checks for repeated copies of items with the same ID in
+ *   adds and updates, or just the same ID in deletes, and keeps
+ *   only the last.
+ * - Checks for deletes followed by adds or updates and replaces with a + * single update + * - Checks for adds followed by deletes and removes both + * - Checks for updates followed by deletes and removes the update + * @param instance The instance that can identify these items + * @param currentUpdate The current store delta + * @param newUpdate The new update to merge + * @returns The merged delta + */ +export function mergeDeltas( + instance: { identify(items: T | T[]): string[] }, + currentUpdate: StoreDelta, + newUpdate: StoreDelta +): StoreDelta { + /** + * Takes the last instance of an item repeated in the list + * @param items Added or updated items + * @returns The added or updated items with repeated items replaced by only the latest version of the item + */ + function takeLastItem(items: T[]): T[] { + const found: { [ index: string ]: boolean} = {}; + const ids = instance.identify(items); + return items.reverse().filter((_, index) => { + const id = ids[index]; + const exists = Boolean(found[id]); + found[id] = true; + return !exists; + }).reverse(); + } + + /** + * Takes the last instance of an id repeated in the list + * @param ids IDs of deleted items + * @returns The list with duplicates removed + */ + function takeLastId(ids: string[]): string[] { + const found: { [ index: string ]: boolean} = {}; + return ids.reverse().filter((id) => { + const exists = Boolean(found[id]); + found[id] = true; + return !exists; + }).reverse(); + } + + /** + * Removes updates for items that were later deleted + * @param newDeletes Deletes from delta(s) after the updates + * @param oldUpdates Updates from delta(s) before the deletes + * @return The updates without updates for subsequently deleted items + */ + function removeOutdatedItems(newDeletes: string[], oldUpdates: T[]) { + const deletedIds = newDeletes.reduce((prev, next) => { + prev.set(next, null); + return prev; + }, new Map()); + const ids = instance.identify(oldUpdates); + return oldUpdates.filter((_, index) => { + return !deletedIds.has(ids[index]); + }); + } + + /** + * Finds cases where an older update has an add, and a newer update has a delete, and removes + * both, since the net effect is that the operations are cancelled out + * @param newDeletes Deletes form delta(s) after the adds + * @param oldAdds Adds from delta(s) before the deletes + * @returns An object with the filtered adds and deletes + */ + function removeCancellingUpdates(newDeletes: string[], oldAdds: T[]) { + const deletedIds = newDeletes.reduce((prev, next) => { + prev.set(next, null); + return prev; + }, new Map()); + const ids = instance.identify(oldAdds); + const addIds = ids.reduce((prev, next) => { + prev.set(next, null); + return prev; + }, new Map()); + return { + oldAdds: oldAdds.filter((_, index) => { + return !deletedIds.has(ids[index]); + }), + newDeletes: newDeletes.filter((id) => !addIds.has(id)) + }; + } + + /** + * Finds places where an item was deleted and then added or updated, and removes the delete. 
If the item was added, + * the add is also replaced with an update since it should already exist in the collection receiving the updates, + * as it will never receive the delete + * @param oldDeletes - Deletes from delta(s) before the adds and updates + * @param newAdds - Adds from delta(s) after the deletes + * @param newUpdates - Updates from delta(s) after the deletes + * @returns An object containing the updated deletes, adds, and updates + */ + function convertReplacementToUpdate(oldDeletes: string[], newAdds: T[], newUpdates: T[]) { + const deletes = oldDeletes.reduce((prev, next) => { + prev.set(next, null); + return prev; + }, new Map()); + const addIds = instance.identify(newAdds); + const updateIds = instance.identify(newUpdates); + const adds = addIds.concat(updateIds).reduce((prev, next) => { + prev.set(next, null); + return prev; + }, new Map()); + const updatedUpdates = newUpdates.slice(); + return { + oldDeletes: oldDeletes.filter((id) => !adds.has(id)), + newAdds: newAdds.filter((item, index) => { + const shouldKeep = !deletes.has(addIds[index]); + if (!shouldKeep) { + // Always add it to the beginning, because it may have been updated as well, but the add + // has to have come first. + updatedUpdates.unshift(item); + } + return shouldKeep; + }), + newUpdates: updatedUpdates + }; + } + + const { oldDeletes, newAdds, newUpdates } = convertReplacementToUpdate( + currentUpdate.deletes, newUpdate.adds, newUpdate.updates + ); + const oldUpdates = removeOutdatedItems(newUpdate.deletes, currentUpdate.updates); + const { newDeletes, oldAdds } = removeCancellingUpdates(newUpdate.deletes, currentUpdate.adds); + return { + updates: takeLastItem([ ...oldUpdates, ...newUpdates ]), + adds: takeLastItem([ ...oldAdds, ...newAdds ]), + deletes: takeLastId([ ...oldDeletes, ...newDeletes ]), + beforeAll: currentUpdate.beforeAll, + afterAll: newUpdate.afterAll + }; +} + +/** + * An update for a single item, used to identify which item an update is for when multiple items are observed + * simultaneously. Deletes are indicated by the item property being undefined. + */ +export interface ItemUpdate { + item?: T; + id: string; +} + +export interface ObservableStoreInterface> extends Store { + /** + * Observe the entire store, receiving deltas indicating the changes to the store. + * When observing, an initial update will be sent with the last known state of the store in the `afterAll` property. + * If fetchAroundUpdates is true, the store's local data will by synchronized with the underlying Storage. + * If fetchAroundUpdates is not true, then the data will be the result of locally applying updates to the data + * retrieved from the last fetch. + */ + observe(): Observable>; + /** + * Receives the current state of the item with the specified ID whenever it is updated. This observable will be + * completed if the item is deleted + * @param id The ID of the item to observe + */ + observe(id: string): Observable; + /** + * Receives the current state of the items in an `ItemUpdate` object whenever they are updated. When any of the + * items are deleted an `ItemUpdate` with the item's ID and no item property will be sent out. When all of the + * observed items are deleted the observable will be completed. + * @param ids - The IDS of the items to observe + */ + observe(ids: string[]): Observable>; +} + +export interface ObservableStoreOptions extends StoreOptions { + /** + * If true, then the local collection will automatically fetch to get the latest data from the store whenver + * an update is made. 
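+	 *
+	 * An illustrative sketch (not part of this patch) of how these options might be used;
+	 * `items` (an array of objects with an `id` property) and `render` are assumed:
+	 *
+	 *   const store = new ObservableStore({
+	 *       data: items,
+	 *       idProperty: 'id',
+	 *       fetchAroundUpdates: true,
+	 *       fetchAroundUpdateDebounce: 100
+	 *   });
+	 *   // Each StoreDelta then reflects freshly fetched data rather than locally applied updates
+	 *   store.observe().subscribe((delta) => render(delta.afterAll));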
+ */ + fetchAroundUpdates?: boolean; + /** + * Specifies how long the fetch around updates should be debounced to avoid rapidly fetching when many updates + * are made within close proximity. Defaults to 200 milliseconds + */ + fetchAroundUpdateDebounce?: number; +} + +export type ObserverSetEntry = { observes: Set; observer: Observer> }; +/** + * Build a map of ids to indices for the provided collection. This requires that the array of IDs is either what + * the index if for, or that the array of items the IDs represent is in the same order, which is already the case + * if the IDs were generated using the Store's identify function. + * @param ids - The IDS to build the index for + * @returns An index mapping ids to indices + */ +export function buildIndex(ids: string[]): Map { + return ids.reduce((map, id, index) => { + map.set(id, index); + return map; + }, new Map()); +} + +/** + * Determines whether this is a single observer or a set entry + * @param observer + * @returns {boolean} + */ +function isObserverEntry(observer: Observer | ObserverSetEntry): observer is ObserverSetEntry { + return ( observer).observes instanceof Set; +} + +/** + * Determines whether this is a single observer or a set entry + * @param observer + * @returns {boolean} + */ +function isObserver(observer: Observer | ObserverSetEntry): observer is Observer { + return !isObserverEntry(observer); +} + +class ObservableStore extends StoreBase implements ObservableStoreInterface> { + protected fetchAroundUpdates: boolean; + /** + * A debounced function called to fetch the latest data and send updates to observers after each crud operation, + * if fetchAroundUpdates is true. + */ + private fetchAndSendUpdates: (store: ObservableStoreInterface>) => void; + /** + * Maps item IDs to observers for that item, or sets of observers. For Single item observers this is a one-to-many + * relationship. For `ObserverSetEntries`, this is a many to many relationship, each item can be observed as a part + * of many sets, and each set is linked to all of the items within it. + */ + private itemObservers: Map | ObserverSetEntry)[]>; + /** + * All the observers of the store + */ + private observers: Observer>[]; + /** + * The single observable provided to all observers of the store + */ + private storeObservable: Observable>; + /** + * Updates currently waiting to be merged and sent + */ + private queuedUpdate?: StoreDelta; + /** + * The latest local data + */ + private localData: T[]; + /** + * Maps item IDs to indices in `localData` + */ + private localIndex: Map; + /** + * When `fetchAroundUpdates` is true, this promise is used to wait for the first fetch before sending out initial + * updates, since `localData` will be out of date as soon as the fetch completes. 
+ */ + private initialFetch?: Promise; + + /** + * Flag indicating that data was passed in the constructor, and we should ignore the first add + * + */ + private ignoreFirstAdd: boolean; + constructor(options?: ObservableStoreOptions) { + super(options); + options = options || {}; + this.fetchAroundUpdates = Boolean(options.fetchAroundUpdates); + this.fetchAndSendUpdates = debounce((store: ObservableStore) => { + store.fetch(); + }, options.fetchAroundUpdateDebounce || 20); + this.observers = []; + this.localData = []; + this.ignoreFirstAdd = Boolean(options.data); + this.localIndex = new Map(); + if (options.fetchAroundUpdates) { + this.initialFetch = this.fetch(); + } + this.itemObservers = new Map | ObserverSetEntry)[]>(); + + this.storeObservable = new Observable>((observer: Observer>) => { + this.observers.push(observer); + if (this.initialFetch) { + this.initialFetch.then(() => { + observer.next({ + updates: [], + deletes: [], + adds: [], + beforeAll: [], + afterAll: this.localData.slice() + }); + }); + } + else { + observer.next({ + updates: [], + deletes: [], + adds: [], + beforeAll: [], + afterAll: this.localData.slice() + }); + } + return () => { + const remove = (observer: Observer>) => { + this.observers.splice(this.observers.indexOf(observer), 1); + }; + setTimeout(() => { + remove(observer); + }); + }; + }); + } + + observe(): Observable>; + observe(id: string): Observable; + observe(ids: string[]): Observable>; + observe(idOrIds?: string | string[]): Observable> | Observable | Observable> { + if (idOrIds) { + if (Array.isArray(idOrIds)) { + const ids = idOrIds; + + const idSet = new Set(ids); + return new Observable>((observer: Observer>) => { + const observerEntry: ObserverSetEntry = { + observes: idSet, + observer: observer + }; + ids.forEach((id: string) => { + if (this.itemObservers.has(id)) { + this.itemObservers.get(id)!.push(observerEntry); + } + else { + this.itemObservers.set(id, [observerEntry]); + } + }); + const foundIds = new Set(); + after(observer, 'next', (result: any, args: IArguments) => { + const itemUpdate: ItemUpdate = args[0]; + foundIds.add(itemUpdate.id); + return result; + }); + + this.get(ids).then((items: T[]) => { + if (foundIds.size !== ids.length) { + const retrievedIdSet = new Set(this.identify(items)); + let missingItemIds = ids.filter(id => !retrievedIdSet.has(id)); + + if (retrievedIdSet.size !== idSet.size || missingItemIds.length) { + observer.error(new Error(`ID(s) "${missingItemIds}" not found in store`)); + } + else { + items.forEach((item, index) => observer.next({ + item: item, + id: ids[index] + })); + } + } + }); + }); + } + const id = idOrIds; + return new Observable((observer: Observer) => { + this.get(id).then((item: any) => { + if (!item) { + observer.error(new Error(`ID "${id}" not found in store`)); + } + else { + if (this.itemObservers.has(id)) { + this.itemObservers.get(id)!.push(observer); + } + else { + this.itemObservers.set(id, [ observer ]); + } + observer.next(item); + } + }); + }); + } + return this.storeObservable; + } + + /** + * After fetching, sends updates if no query was used. If a custom query was used then the data retrieved + * is not indicative of the local data and can't be used. We shouldn't apply the query locally because we + * have no knowledge of the underlying storage implementation or the amount of data and it may be too much + * data to retrieve or update in memory. If this is the initialFetch, don't update since that update + * will be sent to each subscriber at the time of subscription. 
If we're not sending updates, still set + * the local data and index to the newly retrieved data. + */ + fetch(query?: Query) { + const result = super.fetch(query); + if (!query) { + result.then( + (data) => { + if (result !== this.initialFetch) { + this.sendUpdates(data); + } + else { + this.localData = data; + this.localIndex = buildIndex(this.identify(data)); + } + }, + // Ignore errors here, they should be handled by the caller not observers + () => {} + ); + } + return result; + } + + /** + * After the put is completed, notify the item observers, and then either queue a fetch to send updates + * if fetchAroundUpdates is true, or just send updates if not. + */ + put(items: T | T[]) { + const result = super.put(items); + result.then( + (updatedItems: T[]) => { + this.notifyItemObservers(updatedItems, []); + this.sendUpdatesOrFetch(updatedItems, [], []); + }, + // Ignore errors here, they should be handled by the caller not observers + () => {} + ); + return result; + } + + /** + * After the patch is completed, notify the item observers, and then either queue a fetch to send updates + * if fetchAroundUpdates is true, or just send updates if not. + */ + patch(updates: PatchArgument, options?: CrudOptions) { + const result = super.patch(updates, options); + result.then( + (updatedItems: T[]) => { + this.notifyItemObservers(updatedItems, []); + this.sendUpdatesOrFetch(updatedItems, [], []); + }, + // Ignore errors here, they should be handled by the caller not observers + () => {} + ); + return result; + } + + /** + * After the add is completed notify observers. If this is the initial add AND we are fetching around + * updates, then the first update to subscribers will already contain this data, since the initial fetch + * is performed after the initial add. In this case we do not need to send an update. We can tell this + * is the first add because it'll be triggered in the StoreBase base before the state is created for + * this instance in the mixin's initializer + */ + add(items: T[] | T, options?: CrudOptions) { + const result = super.add(items, options); + result.then( + (addedItems: T[]) => { + if (!this.ignoreFirstAdd || !this.fetchAroundUpdates) { + this.sendUpdatesOrFetch([], addedItems, []); + } + this.ignoreFirstAdd = false; + }, + // Ignore errors here, they should be handled by the caller not observers + () => {} + ); + return result; + } + + /** + * After the items are deleted, notify item set observers of the deletion of one of the items they are + * observing, and then complete any observables that need to be completed. 
+ * Completing observables is dones as follows + * - For observers of a single item, just complete the observer + * - For observers of a set of items + * - Remove the deleted ID of this item from the set of observed IDs + * - If there are now no observed IDs for the set, complete the observable + * - Remove the item observer entry for the deleted ID + */ + delete(ids: string[] | string) { + const result = super.delete(ids); + result.then( + (deleted: string[]) => { + this.notifyItemObservers(null, deleted); + deleted.forEach((id: string) => { + if (this.itemObservers.has(id)) { + this.itemObservers.get(id)!.forEach((observerOrEntry) => { + if (isObserver(observerOrEntry)) { + observerOrEntry.complete(); + } + else { + observerOrEntry.observes.delete(id); + if (!observerOrEntry.observes.size) { + observerOrEntry.observer.complete(); + } + } + }); + this.itemObservers.delete(id); + } + }); + this.sendUpdatesOrFetch([], [], deleted); + }, + // Ignore errors here, they should be handled by the caller not observers + () => {} + ); + return result; + } + + /** + * Merges the latest queued updates, updates the local data and index based on the latest data, + * sends out updates to observers, and then removes observers that unsubscribed during the update process from the list + * of observers. If after is provided, it is assumed that that is the latest data for the store, if it is not provided + * the local data is updated according to the merged delta and that is used as the new local data. + * @param after - Optional array of items containing the latest data for the store. + */ + private sendUpdates(after?: T[]) { + const storeDelta = this.queuedUpdate || { + updates: [], + adds: [], + deletes: [], + beforeAll: [], + afterAll: [] + }; + this.queuedUpdate = undefined; + after = after || this.addUpdateDelete(storeDelta); + + storeDelta.beforeAll = this.localData; + storeDelta.afterAll = after; + this.localData = after; + this.localIndex = buildIndex(this.identify(after)); + + this.observers.forEach((observer) => { + observer.next({ + updates: storeDelta.updates.slice(), + adds: storeDelta.adds.slice(), + deletes: storeDelta.deletes.slice(), + beforeAll: storeDelta.beforeAll.slice(), + afterAll: storeDelta.afterAll.slice() + }); + }); + } + + /** + * Takes a collection of items and creates a new copy modified according to the provided updates. This can be used to + * attempt to track updates in the local collection when fetching after each update is disabled. + * @param update + * @returns A new collection with the modifications specified by the update + */ + private addUpdateDelete(update: StoreDelta) { + const newData = this.localData.slice(); + update.adds.forEach((item) => { + newData.push(item); + }); + + this.identify(update.updates).forEach((id, index) => { + if (this.localIndex.has(id)) { + newData[this.localIndex.get(id)!] = update.updates[index]; + } + else { + newData.push(update.updates[index]); + } + }); + + update.deletes.sort().reverse().forEach((id) => { + if (this.localIndex.has(id)) { + newData.splice(this.localIndex.get(id)!, 1); + } + }); + + return newData; + } + + /** + * Iterates through the provided items and/or IDs and notifies observers. If items is provided, then the + * observers for that item, and the observers for sets of items that include that are updated. If items is null, then + * these are delete notifications for observers of multiple items. 
In this case, no update is sent to individual + * observers, and observers of sets receive `ItemUpdate` objects with the IDs of the deleted items and an undefined + * item + * + * @param items Items to send updates for, or null if these are delete notifications for item set observers + * @param ids - IDs of the items, should be in the same order as items + */ + private notifyItemObservers(items: T[] | null, ids: string[]) { + const notify = (id: string, after?: any) => { + if (this.itemObservers.has(id)) { + this.itemObservers.get(id)!.map((observerOrEntry): Observer> | null => { + if (isObserverEntry(observerOrEntry)) { + return observerOrEntry.observer; + } + else { + return null; + } + }).filter((observerEntry) => { + return observerEntry; + }).forEach((observer: Observer>) => { + observer.next({ + item: after, + id: id + }); + }); + if (after) { + this.itemObservers.get(id)!.map((observerOrEntry): Observer | null => { + if (isObserver(observerOrEntry)) { + return observerOrEntry; + } + else { + return null; + } + }).filter((observer) => { + return observer; + }).forEach((observer: Observer) => { + observer.next(after); + }); + } + } + }; + if (items) { + items.forEach((after: T, index: number) => { + const id = ids[index] || this.identify(after); + notify(id, after); + }); + } + else { + ids.forEach((id) => { + notify(id, undefined); + }); + } + } + + /** + * Queues the appropriate update and then either starts up a fetch or just triggers sending the updates depending + * on the `fetchAroundUpdates` property + * @param updates Updated items + * @param adds Added items + * @param deletes Deleted IDs + */ + private sendUpdatesOrFetch(updates: T[], adds: T[], deletes: string[]) { + const newUpdate = { + updates: updates, + adds: adds, + deletes: deletes, + beforeAll: [], + afterAll: [] + }; + this.queuedUpdate = this.queuedUpdate ? 
mergeDeltas(this, this.queuedUpdate, newUpdate) : newUpdate; + if (this.fetchAroundUpdates) { + this.fetchAndSendUpdates(this); + } + else { + this.sendUpdates(); + } + } +} + +export default ObservableStore; diff --git a/src/store/QueryResult.ts b/src/store/QueryResult.ts new file mode 100644 index 0000000..349427f --- /dev/null +++ b/src/store/QueryResult.ts @@ -0,0 +1,857 @@ +import { ItemUpdate, StoreDelta, mergeDeltas, buildIndex } from './ObservableStore'; +import { Query, QueryType, FetchResult } from '../interfaces'; +import { Observable, Observer } from '@dojo/core/Observable'; +import Patch from '../patch/Patch'; +import createFilter, { Filter } from '../query/createFilter'; +import createRange, { StoreRange } from '../query/createStoreRange'; +import createSort, { Sort } from '../query/createSort'; +import CompoundQuery from '../query/CompoundQuery'; +import Promise from '@dojo/shim/Promise'; +import Map from '@dojo/shim/Map'; +import Set from '@dojo/shim/Set'; +import { debounce } from '@dojo/core/util'; +import { isFilter, isSort, QueryableStoreInterface } from './QueryableStore'; + +export interface TrackableStoreDelta extends StoreDelta { + /** + * Contains info for any items that were formerly in the tracked collection and are now not, regardless of how + * those items were removed + */ + removedFromTracked: { item: T; id: string; previousIndex: number; }[]; + /** + * Contains info for any items that are now in the tracked collection and formerly were not, regardless of how + * those items were added + */ + addedToTracked: { item: T; id: string; index: number; }[]; + /** + * Contains info were previously and still are in the tracked collection but have changed position, regardless of + * how the items were moved. + */ + movedInTracked: { item: T; id: string; previousIndex: number; index: number }[]; +} + +/** + * Checks if this is a tracked update or not + * @param storeDelta + * @returns {Boolean} + */ +function isTracked(storeDelta: StoreDelta): storeDelta is TrackableStoreDelta { + const tracked = > storeDelta; + return Boolean(tracked.removedFromTracked || tracked.addedToTracked || tracked.movedInTracked); +} + +/** + * Describes a transformation + */ +export type TransformationDescriptor = { + transformation: Patch | ((item: F) => T); idTransform?: string | ((item: T) => string) +}; + +/** + * If this function is 'mapped'(Items can be identified), and it contains only transformations and incremental queries, + * then we can update it in place, assuming that we are notified about all changes and are starting from the correct + * data. 
+ * @param queriesAndTransforms + * @param result + * @returns {boolean|boolean} + */ +function canUpdateInPlace( + queriesAndTransforms: Array | TransformationDescriptor>, + result: QueryResultInterface +) { + return isMapped(result) && queriesAndTransforms.every((queryOrTransformation) => + !isQuery(queryOrTransformation) || Boolean(queryOrTransformation.incremental) + ); +} + +export interface QueryableStoreInterfaceOptions> { + queriesAndTransformations: Array | TransformationDescriptor>; + source: S; + isTracking?: boolean; + trackingFetchDelay?: number; + fetchAroundUpdates: boolean; +} + +export interface QueryResultInterface> { + query(query: Query): this; + filter(filter: Filter): this; + filter(test: (item: T) => boolean): this; + range(range: StoreRange): this; + range(start: number, count: number): this; + sort(sort: Sort | ((a: T, b: T) => number) | string, descending?: boolean): this; + observe(): Observable>; + observe(id: string): Observable; + observe(ids: string[]): Observable>; + get(ids: string | string[]): Promise; + transform(transformation: Patch | ((item: T) => V)): QueryResultInterface; + transform(transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string)): MappedQueryResult; + fetch(query?: Query): FetchResult; + source: S; +} + +export interface MappedQueryResultInterface< + T, S extends QueryableStoreInterface +> extends QueryResultInterface { + /** + * Starts actively tracking this view, such that any time updates are made, this will fetch if necessary to make + * sure it has the latest data. + */ + track(): TrackedQueryResultInterface; + identify(items: T[]): string[]; + identify(item: T): string; + identify(items: T | T[]): string | string[]; + observe(): Observable>; + /** + * These overrides aren't actually changing the signature, they are just necessary to make typescript happy about + * the override of the no arg signature for observe + */ + observe(id: string): Observable; + observe(ids: string[]): Observable>; +} + +export interface TrackedQueryResultInterface< + T, S extends QueryableStoreInterface +> extends MappedQueryResultInterface { + /** + * Create a new query transform result that is not tracking the source store but represents the same queries and + * transforms + */ + release(): MappedQueryResultInterface; +} + +/** + * Check if this is a 'mapped' query transform result + * @param queryTransformResult + * @returns {boolean} + */ +function isMapped( + queryTransformResult: QueryResultInterface +): queryTransformResult is MappedQueryResultInterface { + return typeof (> queryTransformResult).track === 'function'; +} + +/** + * Check if this is a patch or just a transform function + * @param transform + * @returns {boolean} + */ +function isPatch(transform: Patch | ((item: F) => T)): transform is Patch { + return typeof transform !== 'function'; +} + +/** + * Checks if this is a query or a transformations descriptor + * @param queryOrTransformation + * @returns {boolean} + */ +function isQuery(queryOrTransformation: Query | TransformationDescriptor): queryOrTransformation is Query { + const asTransformation = queryOrTransformation as TransformationDescriptor; + const asQuery = queryOrTransformation as Query; + return !asTransformation.transformation && !asTransformation.idTransform && typeof asQuery.apply === 'function'; +} + +/** + * Checks if this is a query or a transformations descriptor + * @param queryOrTransformation + * @returns {boolean} + */ +function isTransformation(queryOrTransformation: Query | 
TransformationDescriptor): queryOrTransformation is TransformationDescriptor { + const asTransformation = queryOrTransformation as TransformationDescriptor; + const asQuery = queryOrTransformation as Query; + return asTransformation.transformation && typeof asQuery.apply !== 'function'; +} + +/** + * Applies only the transformations in the queries and transformations array to the provided item(s). Useful for + * converting an item from its original shape to the transformed shape when querying is not needed (e.g. for observing + * individual items). + * @param queriesAndTransformations + * @param item An item or an array of items + * @returns The transformed item or items + */ +function transformData( + queriesAndTransformations: Array | TransformationDescriptor>, + item: any | any[] +) { + function transformSingleItem(item: any) { + return queriesAndTransformations + .reduce((prev, next) => { + if (isTransformation(next)) { + const transform = next.transformation; + return isPatch(transform) ? transform.apply(prev) : transform(prev); + } + else { + return prev; + } + }, item); + } + if (Array.isArray(item)) { + return item.map(transformSingleItem); + } + else { + return transformSingleItem(item); + } +} + +/** + * Pulls the item out of an `ItemUpdate` object and then delegates to `transformData` to transform it before creating + * a new `ItemUpdate` with the modified data. + * @param queriesAndTransformations + * @param update + * @returns A new `ItemUpdate` with any transformations applied + */ +function transformItemUpdate( + queriesAndTransformations: Array | TransformationDescriptor>, + update: ItemUpdate +) { + return { + id: update.id, + item: update.item ? transformData(queriesAndTransformations, update.item) : update.item + }; +} + +export class QueryResult> implements QueryResultInterface { + /** + * The store this query transform result comes from + */ + readonly source: S; + /** + * Queries and transformations for this query transform result + */ + protected queriesAndTransformations: Array | TransformationDescriptor>; + /** + * Tracks whether we can modify the local collection in place or need to fetch to get the correct this after an + * update + */ + protected canUpdateInPlace: boolean; + /** + * Tracks whether we're tracking this collection + */ + protected isTracking?: boolean; + /** + * Optional value that indicates the amount of time to debounce the fetch called after receiving an update. + */ + protected trackingFetchDelay?: number; + /** + * A debounced function that just delegates to the instance's fetch method + * @param instance + */ + protected fetchAndSendUpdates: (instance: QueryResultInterface) => void; + /** + * The observable that observers of this query transform result will be provided + */ + protected observable: Observable>; + /** + * Observers of this query transform result + */ + protected observers: Observer>[]; + /** + * The local copy of the data for this view + */ + protected localData: T[]; + /** + * Updates ready to be send after the next fetch + */ + protected queuedUpdate?: StoreDelta; + /** + * Keeps track of new item IDs as updates are being queued + */ + protected currentUpdateIndex: Set; + /** + * Promise tracking the initial fetch if we are tracking and are not fetchingAroundUpdates + */ + protected initialFetch?: Promise; + /** + * Is the parent store fetching around updates + * If the parent store is fetching around updates, we will always have the latest superset of this view's data in + * the updates it receives locally. 
In that case, even if actively tracking, no additional fetches need to be + * performed, the local queries and transformations can just be applied to the new data directly. + */ + protected fetchAroundUpdates: boolean; + /** + * Maps IDs to indices in localDAta + */ + protected localIndex: Map; + + /** + * Handle to the subscription to the source store + */ + protected sourceHandle?: Promise<{ unsubscribe: Function }>; + constructor(options?: QueryableStoreInterfaceOptions) { + if (!options) { + throw Error('Query Transform result cannot be created without providing a source store'); + } + const observable = new Observable>((observer: Observer>) => { + this.observers.push(observer); + this.handleInitialNotification(observer); + return () => { + const remove = (observer: Observer>) => { + this.observers.splice(this.observers.indexOf(observer), 1); + if (!this.observers.length && this.sourceHandle) { + this.sourceHandle.then((subscription) => { + if (!this.observers.length) { + subscription.unsubscribe(); + this.sourceHandle = undefined; + } + + }); + } + }; + + // Do the actual removal on the next tick so that + // we don't remove items from the array while we're iterating through it. + setTimeout(() => { + remove(observer); + }); + }; + }); + + const updateInPlace = canUpdateInPlace(options.queriesAndTransformations, this); + + this.source = options.source; + this.observers = []; + this.canUpdateInPlace = updateInPlace; + this.observable = observable; + this.localData = []; + this.localIndex = new Map(); + this.queriesAndTransformations = options.queriesAndTransformations; + this.isTracking = options.isTracking; + this.trackingFetchDelay = options.trackingFetchDelay; + this.currentUpdateIndex = new Set(); + this.fetchAndSendUpdates = debounce((instance: QueryResultInterface) => { + instance.fetch(); + }, options.trackingFetchDelay || 20); + this.fetchAroundUpdates = options.fetchAroundUpdates; + + if (options.isTracking && !options.fetchAroundUpdates) { + this.fetch(); + } + } + + query(query: Query): this { + return new ( this.constructor)(this.getQueryOptions(query)); + } + + filter(filterOrTest: Filter | ((item: T) => boolean)) { + let filter: Filter; + if (isFilter(filterOrTest)) { + filter = filterOrTest; + } + else { + filter = createFilter().custom(<(item: T) => boolean> filterOrTest); + } + + return this.query(filter); + } + + range(rangeOrStart: StoreRange | number, count?: number) { + let range: StoreRange; + if (typeof count !== 'undefined') { + range = createRange( rangeOrStart, count); + } + else { + range = > rangeOrStart; + } + + return this.query(range); + } + + sort(sortOrComparator: Sort | ((a: T, b: T) => number), descending?: boolean) { + let sort: Sort; + if (isSort(sortOrComparator)) { + sort = sortOrComparator; + } + else { + sort = createSort(sortOrComparator, descending); + } + + return this.query(sort); + } + + observe(): Observable>; + observe(id: string): Observable; + observe(ids: string[]): Observable>; + observe(idOrIds?: string | string[]) { + if (!idOrIds) { + if (!this.sourceHandle) { + const waitForFetchPromise: Promise = this.initialFetch || Promise.resolve(); + this.sourceHandle = waitForFetchPromise.then(() => { + return this.source.observe().subscribe((update: StoreDelta) => { + this.handleUpdate(update); + }); + }); + } + return this.observable; + } + else { + if (Array.isArray(idOrIds)) { + return this.source + .observe(idOrIds) + .map((update: ItemUpdate) => transformItemUpdate(this.queriesAndTransformations, update)); + } + else { + return 
this.source + .observe(idOrIds) + .map((update: any) => transformData(this.queriesAndTransformations, update)); + } + } + } + + get(ids: string | string[]) { + const promise: Promise = this.initialFetch || Promise.resolve(); + const mapped = isMapped(this); + return promise.then(() => { + if (mapped) { + if (Array.isArray(ids)) { + return ids.map((id) => this.localData[this.localIndex.get(id)!]) + .filter((item) => Boolean(item)); + } + else { + return this.localData[this.localIndex.get(ids)!]; + } + } + else { + return this.source.get(ids).then((data) => { + if (Array.isArray(data)) { + return this.queryAndTransformData(data); + } + else if (data) { + return this.queryAndTransformData([ data ])[0]; + } + else { + return data; + } + }); + } + }); + } + + transform(transformation: Patch | ((item: T) => V)): QueryResultInterface; + transform(transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string)): MappedQueryResult; + transform( + transformation: Patch | ((item: any) => V), + idTransform?: string | ((item: V) => string) + ): QueryResultInterface | MappedQueryResult { + const options: QueryableStoreInterfaceOptions = { + source: this.source, + queriesAndTransformations: [ + ...this.queriesAndTransformations, + { transformation: transformation, idTransform: idTransform } + ], + trackingFetchDelay: this.trackingFetchDelay, + fetchAroundUpdates: this.fetchAroundUpdates + }; + if (idTransform) { + return new MappedQueryResult(options); + } + else { + return new QueryResult(options); + } + } + + fetch(query?: Query): FetchResult { + let firstQuery = new CompoundQuery(); + const queriesAndTransformations = this.queriesAndTransformations.slice(); + let nextQuery = queriesAndTransformations.shift(); + // Get the queries that can be passed through to the store. This includes all queries up to and including the + // first non incremental query(e.g. a range query) or up to and not including the first transformation + while (nextQuery && isQuery(nextQuery) && nextQuery.incremental) { + firstQuery = firstQuery.withQuery(nextQuery); + nextQuery = queriesAndTransformations.shift(); + } + if (nextQuery && isQuery(nextQuery)) { + firstQuery = firstQuery.withQuery(nextQuery); + } + else if (nextQuery) { + queriesAndTransformations.unshift(nextQuery); + } + + const mapped: MappedQueryResultInterface | undefined = isMapped(this) ? + this as MappedQueryResultInterface : undefined; + let nextUpdate: StoreDelta = (this.queuedUpdate && mapped) ? 
this.queuedUpdate : { + adds: [], + updates: [], + deletes: [], + beforeAll: [], + afterAll: [] + }; + this.currentUpdateIndex.clear(); + this.queuedUpdate = undefined; + + let resolveTotalLength: Function | undefined = undefined; + let rejectTotalLength: Function | undefined = undefined; + const totalLength = new Promise((resolve, reject) => { + resolveTotalLength = resolve; + rejectTotalLength = reject; + }); + let resolveDataLength: Function; + let rejectDataLength: Function; + const dataLength = new Promise((resolve, reject) => { + resolveDataLength = resolve; + rejectDataLength = reject; + }); + const fetchResult = this.source.fetch(firstQuery); + const resultsPromise: FetchResult = fetchResult.then( + (newData: any[]) => { + // We should apply the query transform result's own queries first so that the total size of the locally + // cached data can be determined + newData = this.queryAndTransformData(newData, queriesAndTransformations); + resolveDataLength(newData.length); + + this.updateMappedState(newData, resultsPromise, nextUpdate); + if (query) { + newData = query.apply(newData); + } + return newData; + }, + (error: any) => { + rejectDataLength(error); + throw error; + }); + fetchResult.totalLength.then(resolveTotalLength, rejectTotalLength); + resultsPromise.dataLength = dataLength; + resultsPromise.totalLength = totalLength; + + if (!this.initialFetch) { + this.initialFetch = resultsPromise; + } + + return resultsPromise; + } + + protected handleUpdate(update: StoreDelta) { + update = this.localizeUpdate(update); + this.sendUpdate(update); + } + + /** + * Sends the update if it actually represents any change in the data, and then removes observers that unsubscribed + * from the list. + * @param update + */ + protected sendUpdate(update: StoreDelta) { + // Don't send an update if nothing happened + if (update.deletes.length || update.updates.length || update.adds.length || ( + isTracked(update) && ( + update.movedInTracked.length || update.addedToTracked.length || update.removedFromTracked.length + ) + )) { + this.observers.forEach(function(observer) { + if (isTracked(update)) { + observer.next({ + updates: update.updates.slice(), + adds: update.adds.slice(), + deletes: update.deletes.slice(), + afterAll: update.afterAll.slice(), + beforeAll: update.beforeAll.slice(), + movedInTracked: update.movedInTracked.slice(), + removedFromTracked: update.removedFromTracked.slice(), + addedToTracked: update.addedToTracked.slice() + } as TrackableStoreDelta); + } + else { + observer.next({ + updates: update.updates.slice(), + adds: update.adds.slice(), + deletes: update.deletes.slice(), + afterAll: update.afterAll.slice(), + beforeAll: update.beforeAll.slice() + }); + } + }); + } + } + + /** + * Removes items from adds and updates, and IDs from deletes, that don't belong in this query transform result. The + * observers of this view don't want to see unrelated updates. currentUpdateIndex is used when operating on batch + * updates. If updates are processed in a batch, an item might be added in one, and then removed in a later update. The + * newly added item will not yet be represented in the local data because the update needs to be localized before it + * can be used to update the local data. 
A single map can be passed as the currentUpdateIndex in multiple calls to + * localizeUpdate, and can then serve as a signal that even though a deleted ID isn't in the local index it is still + * a relevant update + * @param update + * @param instance + * @param currentUpdateIndex + * @returns {{deletes: string[], adds: any[], updates: any[], beforeAll: any[], afterAll: any[]}} + */ + protected localizeUpdate( + update: StoreDelta, + instance?: MappedQueryResultInterface, + currentUpdateIndex?: Set + ) { + + // Don't apply range queries, sorts, etc. to adds and updates, because those don't make sense in that context + const adds = this.queryAndTransformData(update.adds, undefined, undefined, true, true); + const updates = this.queryAndTransformData(update.updates, undefined, instance, true, true); + if (instance && currentUpdateIndex) { + instance.identify(adds.concat(updates)).map((id) => currentUpdateIndex.add(id)); + } + const deletes = update.deletes.filter((id) => + this.localIndex.has(id) || currentUpdateIndex && currentUpdateIndex.has(id) + ); + // Applying range queries to beforeAll and afterAll may not be completely accurate, in the case that + // we are not eagerly fetching or tracking, but the data would definitely not be accurate if we don't apply them + // and we shouldn't be returning more data than the queries require. + const beforeAll = this.queryAndTransformData(update.beforeAll); + const afterAll = this.queryAndTransformData(update.afterAll); + + return { + deletes: deletes, + adds: adds, + updates: updates, + beforeAll: beforeAll, + afterAll: afterAll + }; + } + + /** + * Applies all of the provided queries and transformations to the data, with some optional changes + * - If the instance and this are provided, then the localIndex will be checked and any items in it will be kept + * even if they would be otherwise eliminated by a filter. This is used specifically for updates, since if an item + * no longer satisifies the filters but is in the local index that means it has been modified and as a result removed + * from the tracked filter. We still want to have access to the new data for inclusion in the `removedFromTracked` + * update so that the user sees how the item changed to be removed from the collection. + * - If `ignoreSorts` is true, then sorts are not applied. This is useful for just filtering out data when it's not + * actually being used to represent the final, tracked, collection + * - If `ignoreNonIncrementalQueries` is true, non-incremental queries like ranges are ignored. Similar to ignoreSorts, + * this is used when the data being transformed is not the full data set, since in that case non incremental queries + * are meaningless. 
+ * + * @param data + * @param queriesAndTransformations + * @param instance + * @param ignoreSorts + * @param ignoreNonIncrementalQueries + * @returns {any[]} + */ + protected queryAndTransformData( + data: T[], + queriesAndTransformations?: Array | TransformationDescriptor>, + instance?: MappedQueryResultInterface, + ignoreSorts = false, + ignoreNonIncrementalQueries = false + ) { + return (queriesAndTransformations || this.queriesAndTransformations).reduce((prev, next) => { + if (isTransformation(next)) { + return transformData([ next ], prev); + } + else { + if ((!ignoreSorts || next.queryType !== QueryType.Sort) && (!ignoreNonIncrementalQueries || next.incremental)) { + if (instance && isFilter(next)) { + return next + .or(createFilter().custom((item: T) => this.localIndex.has(instance.identify(item)))) + .apply(prev); + } + else { + return next.apply(prev); + } + } + else { + return prev; + } + } + }, data); + } + protected handleInitialNotification(observer: Observer>) { + observer.next({ + updates: [], + adds: [], + deletes: [], + beforeAll: [], + afterAll: this.localData.slice() + }); + } + // Extension point for Mapped update + protected updateMappedState(newData: T[], resultsPromise: Promise, nextUpdate: StoreDelta) { } + + protected getQueryOptions(query: Query) { + return { + source: this.source, + queriesAndTransformations: [ ...this.queriesAndTransformations, query ], + trackingFetchDelay: this.trackingFetchDelay, + fetchAroundUpdates: this.fetchAroundUpdates + }; + } +} + +export class MappedQueryResult< + T, S extends QueryableStoreInterface +> extends QueryResult implements MappedQueryResultInterface { + track(): TrackedQueryResultInterface { + return new TrackedQueryResult({ + isTracking: true, + source: this.source, + trackingFetchDelay: this.trackingFetchDelay, + queriesAndTransformations: this.queriesAndTransformations, + fetchAroundUpdates: this.fetchAroundUpdates + }); + } + + identify(item: T): string; + identify(items: T[]): string[]; + identify(items: T[] | T): string | string[] { + const lastTransformation = this.queriesAndTransformations.reduce | undefined>( + (prev, next) => isTransformation(next) ? next : prev, undefined + ); + const itemArray = Array.isArray(items) ? items : [ items ]; + if (lastTransformation) { + const idTransform = lastTransformation.idTransform!; + if (typeof idTransform === 'string') { + return itemArray.map((item) => ( item)[idTransform]); + } + else { + return itemArray.map(idTransform); + } + } + return this.source.identify(items); + } + + protected handleUpdate(update: StoreDelta) { + if (this.fetchAroundUpdates || !this.isTracking) { + update = this.localizeUpdate(update, this); + const newData = update.afterAll; + const ids = this.identify(newData); + const newIndex = buildIndex(Array.isArray(ids) ? ids : [ ids ]); + this.sendTrackedUpdate(newData, newIndex, update); + this.localData = newData; + this.localIndex = newIndex; + } + else { + // Combine batched updates, use `currentUpdateIndex` to make sure deletes of items added and then deleted within + // the span of the queued updates are not lost. These will be cancelled out by mergeDeltas, but both need + // to be there to properly get cancelled out, otherwise the delete gets removed and the add survives, resulting + // in an incorrect update + update = this.localizeUpdate(update, this, this.currentUpdateIndex); + this.queuedUpdate = this.queuedUpdate ? 
+ mergeDeltas(this, this.queuedUpdate, update) : update; + // Unfortunately if we have a non-incremental query and we are tracking, we will need to fetch + // after each update. This is debounced to avoid rapidly issuing fetch requests in the case that a + // series of updates are received in a short amount of time. + this.fetchAndSendUpdates(this); + } + } + /** + * Compares the latest data to the previous local data to build the change records for a TrackedStoreDelta. Delegates + * to `sendUpdate` to actually send the update to observers. + * @param newData + * @param newIndex + * @param update + */ + protected sendTrackedUpdate(newData: T[], newIndex: Map, update: StoreDelta) { + const removedFromTracked: { item: T; id: string; previousIndex: number; }[] = []; + const addedToTracked: { item: T; id: string; index: number; }[] = []; + const movedInTracked: { item: T; id: string; previousIndex: number; index: number }[] = []; + + const updateMap = this.identify(update.updates).reduce((prev, next, index) => { + prev.set(next, update.updates[index]); + return prev; + }, new Map()); + // Check updates for removals first as it will have the latest data for items moved out of + // the tracked collection. + updateMap.forEach((item, id) => { + if (!newIndex.has(id) && this.localIndex.has(id)) { + removedFromTracked.push({ + item: item, + id: id, + previousIndex: this.localIndex.get(id)! + }); + } + }); + // Handle removals and moves + this.localIndex.forEach((previousIndex, id) => { + if (!newIndex.has(id) && !updateMap.has(id)) { + removedFromTracked.push({ + item: this.localData[previousIndex], + id: id, + previousIndex: previousIndex + }); + } + else if (this.localIndex.get(id) !== newIndex.get(id)) { + const index = newIndex.get(id)!; + movedInTracked.push({ + item: newData[index], + id: id, + index: index, + previousIndex: previousIndex + }); + } + }); + + // Handle additions + newIndex.forEach((index, id) => { + if (!this.localIndex.has(id)) { + addedToTracked.push({ + item: newData[index], + id: id, + index: index + }); + } + }); + + const trackedUpdate: TrackableStoreDelta = { + updates: update.updates, + adds: update.adds, + deletes: update.deletes, + removedFromTracked: removedFromTracked, + movedInTracked: movedInTracked, + addedToTracked: addedToTracked, + beforeAll: update.beforeAll, + afterAll: update.afterAll + }; + + this.sendUpdate(trackedUpdate); + } + + protected handleInitialNotification(observer: Observer>) { + const fetchPromise: Promise = this.initialFetch || Promise.resolve(); + fetchPromise.then(() => { + const addedToTracked: { item: any; id: string; index: number; }[] = []; + this.localIndex.forEach((index, id) => { + addedToTracked.push({ + index: index, + item: this.localData[index], + id: id + }); + }); + const trackedDelta: TrackableStoreDelta = { + updates: [], + deletes: [], + adds: [], + addedToTracked: addedToTracked.slice(), + removedFromTracked: [], + movedInTracked: [], + afterAll: this.localData.slice(), + beforeAll: [] + }; + observer.next(trackedDelta); + }); + } + protected updateMappedState(newData: T[], resultsPromise: Promise, nextUpdate: StoreDelta) { + const ids = this.identify(newData); + const newIndex = buildIndex(ids); + // Update this way if this is not an initial fetch. If this is the initial fetch, then this + // data (or subsequent data) will already be provided to observers in the initial notification, so don't + // send a redundant one. 
+ if (resultsPromise !== this.initialFetch) {
+ nextUpdate.beforeAll = this.localData;
+ nextUpdate.afterAll = newData;
+ this.sendTrackedUpdate(newData, newIndex, nextUpdate);
+ }
+ this.localIndex = newIndex;
+ this.localData = newData;
+ }
+}
+
+export class TrackedQueryResult<
+ T, S extends QueryableStoreInterface
+> extends MappedQueryResult implements TrackedQueryResultInterface {
+ release() {
+ return new MappedQueryResult({
+ isTracking: false,
+ source: this.source,
+ queriesAndTransformations: this.queriesAndTransformations,
+ fetchAroundUpdates: this.fetchAroundUpdates
+ });
+ }
+}
diff --git a/src/store/QueryableStore.ts b/src/store/QueryableStore.ts
new file mode 100644
index 0000000..bc5b4a8
--- /dev/null
+++ b/src/store/QueryableStore.ts
@@ -0,0 +1,149 @@
+import { Query, QueryType, CrudOptions, UpdateResults } from '../interfaces';
+import createFilter, { Filter } from '../query/createFilter';
+import createRange, { StoreRange } from '../query/createStoreRange';
+import createSort, { Sort } from '../query/createSort';
+import Patch from '../patch/Patch';
+import {
+ MappedQueryResult, QueryResultInterface, MappedQueryResultInterface, QueryResult
+} from './QueryResult';
+import ObservableStore, { ObservableStoreInterface } from './ObservableStore';
+
+export interface QueryableStoreInterface<
+ T, O extends CrudOptions, U extends UpdateResults
+> extends ObservableStoreInterface {
+ /**
+ * Creates a query transform result with the provided query
+ * @param query
+ */
+ query(query: Query): MappedQueryResultInterface;
+ /**
+ * Creates a query transform result with the provided filter
+ * @param filter
+ */
+ filter(filter: Filter): MappedQueryResultInterface;
+ /**
+ * Creates a query transform result with a filter built from the provided test
+ * @param test
+ */
+ filter(test: (item: T) => boolean): MappedQueryResultInterface;
+ /**
+ * Creates a query transform result with the provided range
+ * @param range
+ */
+ range(range: StoreRange): MappedQueryResultInterface;
+ /**
+ * Creates a query transform result with a range built from the provided start and count
+ * @param start
+ * @param count
+ */
+ range(start: number, count: number): MappedQueryResultInterface;
+ /**
+ * Creates a query transform result with the provided sort or a sort built from the provided comparator or a
+ * comparator for the specified property
+ * @param sort
+ * @param descending
+ */
+ sort(sort: Sort | ((a: T, b: T) => number) | keyof T, descending?: boolean): MappedQueryResultInterface;
+ /**
+ * Create a query transform result that cannot be tracked, and cannot send tracked updates.
This is the case because + * the resulting query transform result will have no way to identify items, making it impossible to determine + * whether their position has shifted or differentiating between updates and adds + * @param transformation + */ + transform(transformation: Patch | ((item: T) => V)): QueryResultInterface; + /** + * Create a trackable query transform result with the specified transformation + * @param transformation + * @param idTransform + */ + transform( + transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string) + ): MappedQueryResultInterface; +} + +/** + * Check if this is a filter query or just a test function + * @param filterOrTest + * @returns {boolean} + */ +export function isFilter(filterOrTest: Query | ((item: T) => boolean)): filterOrTest is Filter { + return typeof filterOrTest !== 'function' && (> filterOrTest).queryType === QueryType.Filter; +} + +/** + * Check if this is a sort query or just a comparator + * @param sortOrComparator + * @returns {boolean} + */ +export function isSort(sortOrComparator: Sort | ((a: T, b: T) => number) | keyof T): sortOrComparator is Sort { + const paramType = typeof sortOrComparator; + return paramType !== 'function' && paramType !== 'string' && typeof (> sortOrComparator).apply === 'function'; +} + +class QueryableStore extends ObservableStore implements QueryableStoreInterface> { + query(query: Query): MappedQueryResultInterface { + return new MappedQueryResult({ + source: this, + queriesAndTransformations: [ query ], + fetchAroundUpdates: this.fetchAroundUpdates + }); + } + + filter(filterOrTest: Filter | ((item: T) => boolean)) { + let filter: Filter; + if (isFilter(filterOrTest)) { + filter = filterOrTest; + } + else { + filter = createFilter().custom(<(item: T) => boolean> filterOrTest); + } + + return this.query(filter); + } + + range(rangeOrStart: StoreRange | number, count?: number) { + let range: StoreRange; + if (typeof count !== 'undefined') { + range = createRange( rangeOrStart, count); + } + else { + range = > rangeOrStart; + } + + return this.query(range); + } + + sort(sortOrComparator: Sort | ((a: T, b: T) => number) | keyof T, descending?: boolean) { + let sort: Sort; + if (isSort(sortOrComparator)) { + sort = sortOrComparator; + } + else { + sort = createSort(sortOrComparator, descending); + } + + return this.query(sort); + } + + transform(transformation: Patch | ((item: T) => V)): QueryResultInterface; + transform( + transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string) + ): MappedQueryResultInterface; + transform( + transformation: Patch | ((item: T) => V), idTransform?: string | ((item: V) => string) + ): QueryResultInterface | MappedQueryResultInterface { + const options = { + source: this, + queriesAndTransformations: [ { transformation: transformation, idTransform: idTransform} ], + fetchAroundUpdates: this.fetchAroundUpdates + }; + if (idTransform) { + return new MappedQueryResult(options); + } + else { + return new QueryResult(options); + } + } +} + +export default QueryableStore; diff --git a/src/store/StoreBase.ts b/src/store/StoreBase.ts new file mode 100644 index 0000000..ede27f5 --- /dev/null +++ b/src/store/StoreBase.ts @@ -0,0 +1,167 @@ +import { + Storage, Query, CrudOptions, UpdateResults, Store, StoreOptions, PatchArgument, FetchResult, PatchMapEntry, + StoreObservable +} from '../interfaces'; +import Promise from '@dojo/shim/Promise'; +import Map from '@dojo/shim/Map'; +import { duplicate } from '@dojo/core/lang'; +import { 
Observer, Observable } from '@dojo/core/Observable'; +import Patch, { diff } from '../patch/Patch'; +import _createStoreObservable from './createStoreObservable'; +import InMemoryStorage from '../storage/InMemoryStorage'; +function isPatchArray(patches: any[]): patches is { id: string; patch: Patch}[] { + return isPatch(patches[0]); +} + +function isPatch(patchObj: any): patchObj is {id: string; patch: Patch } { + const patch = patchObj && patchObj.patch; + const id = patchObj && patchObj.id; + return typeof id === 'string' && patch && Array.isArray(patch.operations) && typeof patch.apply === 'function' && + typeof patch.toString === 'function'; +} + +function createStoreObservable(storeResultsPromise: Promise>) { + + return _createStoreObservable( + new Observable>(function subscribe(observer: Observer>) { + storeResultsPromise + .then(function(results) { + observer.next(results); + observer.complete(); + }, function(error) { + observer.error(error); + }); + }), + function(results: UpdateResults<{}>) { + return results.successfulData; + } + ); +} + +export default class StoreBase implements Store> { + private storage: Storage; + private initialAddPromise: Promise; + constructor(options?: StoreOptions) { + if (!options) { + options = {}; + } + const data: T[] | undefined = options.data; + this.storage = options.storage || new InMemoryStorage(options); + this.initialAddPromise = Promise.resolve(); + if (data) { + this.initialAddPromise = this.add(data).catch((error) => { + console.error(error); + }); + } + } + + get(ids: string[]): Promise; + get(id: string): Promise; + get(ids: string[] | string): Promise { + return this.initialAddPromise.then(() => { + if (Array.isArray(ids)) { + return this.storage.get(ids).then((items) => items.filter((item) => Boolean(item))); + } + else { + return this.storage.get([ids]).then(items => items[0]); + } + }); + } + + add(items: T[] | T, options?: CrudOptions): StoreObservable> { + const storeResultsPromise = this.initialAddPromise.then(() => { + return this.storage.add(Array.isArray(items) ? items : [ items ], options); + }); + return createStoreObservable(storeResultsPromise); + } + + put(items: T[] | T, options?: CrudOptions): StoreObservable> { + const storeResultsPromise = this.initialAddPromise.then(() => { + return this.storage.put(Array.isArray(items) ? 
items : [ items ], options); + }); + + return createStoreObservable(storeResultsPromise); + } + + patch(updates: PatchArgument, options?: CrudOptions): StoreObservable> { + let patchEntries: PatchMapEntry[] = []; + if (Array.isArray(updates)) { + if (isPatchArray(updates)) { + patchEntries = updates; + } + else { + patchEntries = updates.map(({ id }, index) => { + const dupe = duplicate(updates[index]); + delete dupe.id; + return { id: id, patch: diff(dupe)}; + }); + } + } + else if (updates instanceof Map) { + updates.forEach(function(value, key) { + patchEntries.push({ + id: key, + patch: value + }); + }); + } + else if (isPatch(updates)) { + patchEntries = [ updates ]; + } + else { + const dupe = duplicate(updates); + const idInOptions = (options && options.id); + const id = idInOptions || dupe.id; + if (!idInOptions) { + delete dupe.id; + } + patchEntries = [ { id: id, patch: diff(dupe) }]; + } + + const storeResultsPromise = this.initialAddPromise.then(() => { + return this.storage.patch(patchEntries); + }); + + return createStoreObservable(storeResultsPromise); + } + + delete(ids: string | string[]): StoreObservable> { + const storeResultsPromise = this.initialAddPromise.then(() => { + return this.storage.delete(Array.isArray(ids) ? ids : [ ids ]); + }); + + return createStoreObservable(storeResultsPromise); + } + + fetch(query?: Query) { + let resolveTotalLength: (totalLength: number) => void; + let rejectTotalLength: (error: any) => void; + const totalLength = new Promise((resolve, reject) => { + resolveTotalLength = resolve; + rejectTotalLength = reject; + }); + const fetchResult: FetchResult = this.initialAddPromise.then(() => { + const result = this.storage.fetch(query); + result.totalLength.then(resolveTotalLength, rejectTotalLength); + return result; + }); + fetchResult.totalLength = fetchResult.dataLength = totalLength; + + return fetchResult; + } + + identify(items: T[]): string[]; + identify(items: T): string; + identify(items: T | T[]): string | string[] { + if (Array.isArray(items)) { + return this.storage.identify(items); + } + else { + return this.storage.identify([items])[0]; + } + } + + createId() { + return this.storage.createId(); + } +} diff --git a/src/store/createQueryTransformResult.ts b/src/store/createQueryTransformResult.ts deleted file mode 100644 index 470e5a4..0000000 --- a/src/store/createQueryTransformResult.ts +++ /dev/null @@ -1,889 +0,0 @@ -import { ObservableStore, ItemUpdate, StoreDelta, mergeDeltas, buildIndex } from './mixins/createObservableStoreMixin'; -import { Query, QueryType } from '../query/interfaces'; -import { Observable, Observer } from '@dojo/core/Observable'; -import Patch from '../patch/Patch'; -import compose, { ComposeFactory } from '@dojo/compose/compose'; -import createFilter, { Filter } from '../query/createFilter'; -import createRange, { StoreRange } from '../query/createStoreRange'; -import createSort, { Sort } from '../query/createSort'; -import CompoundQuery from '../query/CompoundQuery'; -import Promise from '@dojo/shim/Promise'; -import Map from '@dojo/shim/Map'; -import Set from '@dojo/shim/Set'; -import WeakMap from '@dojo/shim/WeakMap'; -import { debounce } from '@dojo/core/util'; -import { isFilter, isSort, QueryTransformMixin } from './mixins/createQueryTransformMixin'; -import { FetchResult } from '../storage/InMemoryStorage'; - -export interface TrackableStoreDelta extends StoreDelta { - /** - * Contains info for any items that were formerly in the tracked collection and are now not, regardless of how - * those 
items were removed - */ - removedFromTracked: { item: T; id: string; previousIndex: number; }[]; - /** - * Contains info for any items that are now in the tracked collection and formerly were not, regardless of how - * those items were added - */ - addedToTracked: { item: T; id: string; index: number; }[]; - /** - * Contains info were previously and still are in the tracked collection but have changed position, regardless of - * how the items were moved. - */ - movedInTracked: { item: T; id: string; previousIndex: number; index: number }[]; -} - -/** - * Checks if this is a tracked update or not - * @param storeDelta - * @returns {Boolean} - */ -function isTracked(storeDelta: StoreDelta): storeDelta is TrackableStoreDelta { - const tracked = > storeDelta; - return Boolean(tracked.removedFromTracked || tracked.addedToTracked || tracked.movedInTracked); -} - -/** - * Describes a transformation - */ -export type TransformationDescriptor = { - transformation: Patch | ((item: F) => T); idTransform?: string | ((item: T) => string) -}; - -/** - * If this function is 'mapped'(Items can be identified), and it contains only transformations and incremental queries, - * then we can update it in place, assuming that we are notified about all changes and are starting from the correct - * data. - * @param queriesAndTransforms - * @param result - * @returns {boolean|boolean} - */ -function canUpdateInPlace( - queriesAndTransforms: Array | TransformationDescriptor>, - result: QueryTransformResult -) { - return isMapped(result) && queriesAndTransforms.every((queryOrTransformation) => - !isQuery(queryOrTransformation) || Boolean(queryOrTransformation.incremental) - ); -} - -export interface QueryTransformState & QueryTransformMixin> { - /** - * Queries and transformations for this query transform result - */ - queriesAndTransformations: Array | TransformationDescriptor>; - /** - * Tracks whether we can modify the local collection in place or need to fetch to get the correct state after an - * update - */ - canUpdateInPlace: boolean; - /** - * Tracks whether we're tracking this collection - */ - isTracking?: boolean; - /** - * Optional value that indicates the amount of time to debounce the fetch called after receiving an update. - */ - trackingFetchDelay?: number; - /** - * A debounced function that just delegates to the instance's fetch method - * @param instance - */ - fetchAndSendUpdates: (instance: QueryTransformResult) => void; - /** - * The store this query transform result comes from - */ - source: S; - /** - * The observable that observers of this query transform result will be provided - */ - observable: Observable>; - /** - * Observers of this query transform result - */ - observers: Observer>[]; - /** - * The local copy of the data for this view - */ - localData: T[]; - /** - * Updates ready to be send after the next fetch - */ - queuedUpdate?: StoreDelta; - /** - * Keeps track of new item IDs as updates are being queued - */ - currentUpdateIndex: Set; - /** - * Promise tracking the initial fetch if we are tracking and are not fetchingAroundUpdates - */ - initialFetch?: Promise; - /** - * Is the parent store fetching around updates - * If the parent store is fetching around updates, we will always have the latest superset of this view's data in - * the updates it receives locally. In that case, even if actively tracking, no additional fetches need to be - * performed, the local queries and transformations can just be applied to the new data directly. 
- */ - fetchAroundUpdates: boolean; - /** - * Maps IDs to indices in localDAta - */ - localIndex: Map; - - /** - * Handle to the subscription to the source store - */ - sourceHandle?: Promise<{ unsubscribe: Function }>; -} - -export interface QueryTransformOptions & QueryTransformMixin> { - queriesAndTransformations: Array | TransformationDescriptor>; - source: S; - isTracking?: boolean; - trackingFetchDelay?: number; - fetchAroundUpdates: boolean; -} - -export interface QueryTransformResult & QueryTransformMixin> { - query(query: Query): this; - filter(filter: Filter): this; - filter(test: (item: T) => boolean): this; - range(range: StoreRange): this; - range(start: number, count: number): this; - sort(sort: Sort | ((a: T, b: T) => number) | string, descending?: boolean): this; - observe(): Observable>; - observe(id: string): Observable; - observe(ids: string[]): Observable>; - get(ids: string | string[]): Promise; - transform(transformation: Patch | ((item: T) => V)): QueryTransformResult; - transform(transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string)): MappedQueryTransformResult; - fetch(query?: Query): FetchResult; - source: S; -} - -export interface MappedQueryTransformResult & QueryTransformMixin> extends QueryTransformResult { - /** - * Starts actively tracking this view, such that any time updates are made, this will fetch if necessary to make - * sure it has the latest data. - */ - track(): TrackedQueryTransformResult; - identify(items: T[]): string[]; - identify(item: T): string; - identify(items: T | T[]): string | string[]; - observe(): Observable>; - /** - * These overrides aren't actually changing the signature, they are just necessary to make typescript happy about - * the override of the no arg signature for observe - */ - observe(id: string): Observable; - observe(ids: string[]): Observable>; -} - -export interface TrackedQueryTransformResult & QueryTransformMixin> extends MappedQueryTransformResult { - /** - * Create a new query transform result that is not tracking the source store but represents the same queries and - * transforms - */ - release(): MappedQueryTransformResult; -} - -/** - * Check if this is a 'mapped' query transform result - * @param queryTransformResult - * @returns {boolean} - */ -function isMapped( - queryTransformResult: QueryTransformResult -): queryTransformResult is MappedQueryTransformResult { - return typeof (> queryTransformResult).track === 'function'; -} - -/** - * Check if this is a patch or just a transform function - * @param transform - * @returns {boolean} - */ -function isPatch(transform: Patch | ((item: F) => T)): transform is Patch { - return typeof transform !== 'function'; -} - -/** - * Checks if this is a query or a transformations descriptor - * @param queryOrTransformation - * @returns {boolean} - */ -function isQuery(queryOrTransformation: Query | TransformationDescriptor): queryOrTransformation is Query { - const asTransformation = queryOrTransformation as TransformationDescriptor; - const asQuery = queryOrTransformation as Query; - return !asTransformation.transformation && !asTransformation.idTransform && typeof asQuery.apply === 'function'; -} - -/** - * Checks if this is a query or a transformations descriptor - * @param queryOrTransformation - * @returns {boolean} - */ -function isTransformation(queryOrTransformation: Query | TransformationDescriptor): queryOrTransformation is TransformationDescriptor { - const asTransformation = queryOrTransformation as TransformationDescriptor; - const 
asQuery = queryOrTransformation as Query; - return asTransformation.transformation && typeof asQuery.apply !== 'function'; -} - -/** - * Applies only the transformations in the queries and transformations array to the provided item(s). Useful for - * converting an item from its original shape to the transformed shape when querying is not needed (e.g. for observing - * individual items). - * @param queriesAndTransformations - * @param item An item or an array of items - * @returns The transformed item or items - */ -function transformData( - queriesAndTransformations: Array | TransformationDescriptor>, - item: any | any[] -) { - function transformSingleItem(item: any) { - return queriesAndTransformations - .reduce((prev, next) => { - if (isTransformation(next)) { - const transform = next.transformation; - return isPatch(transform) ? transform.apply(prev) : transform(prev); - } - else { - return prev; - } - }, item); - } - if (Array.isArray(item)) { - return item.map(transformSingleItem); - } - else { - return transformSingleItem(item); - } -} - -/** - * Pulls the item out of an `ItemUpdate` object and then delegates to `transformData` to transform it before creating - * a new `ItemUpdate` with the modified data. - * @param queriesAndTransformations - * @param update - * @returns A new `ItemUpdate` with any transformations applied - */ -function transformItemUpdate( - queriesAndTransformations: Array | TransformationDescriptor>, - update: ItemUpdate -) { - return { - id: update.id, - item: update.item ? transformData(queriesAndTransformations, update.item) : update.item - }; -} - -/** - * Compares the latest data to the previous local data to build the change records for a TrackedStoreDelta. Delegates - * to `sendUpdate` to actually send the update to observers. - * @param state - * @param instance - * @param newData - * @param newIndex - * @param update - */ -function sendTrackedUpdate & QueryTransformMixin>( - state: QueryTransformState, - instance: MappedQueryTransformResult, - newData: T[], - newIndex: Map, - update: StoreDelta) { - const removedFromTracked: { item: T; id: string; previousIndex: number; }[] = []; - const addedToTracked: { item: T; id: string; index: number; }[] = []; - const movedInTracked: { item: T; id: string; previousIndex: number; index: number }[] = []; - - const updateMap = instance.identify(update.updates).reduce((prev, next, index) => { - prev.set(next, update.updates[index]); - return prev; - }, new Map()); - // Check updates for removals first as it will have the latest data for items moved out of - // the tracked collection. - updateMap.forEach((item, id) => { - if (!newIndex.has(id) && state.localIndex.has(id)) { - removedFromTracked.push({ - item: item, - id: id, - previousIndex: state.localIndex.get(id)! 
- }); - } - }); - // Handle removals and moves - state.localIndex.forEach((previousIndex, id) => { - if (!newIndex.has(id) && !updateMap.has(id)) { - removedFromTracked.push({ - item: state.localData[previousIndex], - id: id, - previousIndex: previousIndex - }); - } - else if (state.localIndex.get(id) !== newIndex.get(id)) { - const index = newIndex.get(id)!; - movedInTracked.push({ - item: newData[index], - id: id, - index: index, - previousIndex: previousIndex - }); - } - }); - - // Handle additions - newIndex.forEach((index, id) => { - if (!state.localIndex.has(id)) { - addedToTracked.push({ - item: newData[index], - id: id, - index: index - }); - } - }); - - const trackedUpdate: TrackableStoreDelta = { - updates: update.updates, - adds: update.adds, - deletes: update.deletes, - removedFromTracked: removedFromTracked, - movedInTracked: movedInTracked, - addedToTracked: addedToTracked, - beforeAll: update.beforeAll, - afterAll: update.afterAll - }; - - sendUpdate(state, trackedUpdate); -} - -/** - * Sends the update if it actually represents any change in the data, and then removes observers that unsubscribed - * from the list. - * @param state - * @param update - */ -function sendUpdate & QueryTransformMixin>( - state: QueryTransformState, - update: StoreDelta -) { - // Don't send an update if nothing happened - if (update.deletes.length || update.updates.length || update.adds.length || ( - isTracked(update) && ( - update.movedInTracked.length || update.addedToTracked.length || update.removedFromTracked.length - ) - )) { - state.observers.forEach(function(observer) { - if (isTracked(update)) { - observer.next({ - updates: update.updates.slice(), - adds: update.adds.slice(), - deletes: update.deletes.slice(), - afterAll: update.afterAll.slice(), - beforeAll: update.beforeAll.slice(), - movedInTracked: update.movedInTracked.slice(), - removedFromTracked: update.removedFromTracked.slice(), - addedToTracked: update.addedToTracked.slice() - } as TrackableStoreDelta); - } - else { - observer.next({ - updates: update.updates.slice(), - adds: update.adds.slice(), - deletes: update.deletes.slice(), - afterAll: update.afterAll.slice(), - beforeAll: update.beforeAll.slice() - }); - } - }); - } -} - -/** - * Applies all of the provided queries and transformations to the data, with some optional changes - * - If the instance and state are provided, then the localIndex will be checked and any items in it will be kept - * even if they would be otherwise eliminated by a filter. This is used specifically for updates, since if an item - * no longer satisifies the filters but is in the local index that means it has been modified and as a result removed - * from the tracked filter. We still want to have access to the new data for inclusion in the `removedFromTracked` - * update so that the user sees how the item changed to be removed from the collection. - * - If `ignoreSorts` is true, then sorts are not applied. This is useful for just filtering out data when it's not - * actually being used to represent the final, tracked, collection - * - If `ignoreNonIncrementalQueries` is true, non-incremental queries like ranges are ignored. Similar to ignoreSorts, - * this is used when the data being transformed is not the full data set, since in that case non incremental queries - * are meaningless. 
- * - * @param queriesAndTransformations - * @param data - * @param instance - * @param state - * @param ignoreSorts - * @param ignoreNonIncrementalQueries - * @returns {any[]} - */ -function queryAndTransformData( - queriesAndTransformations: Array | TransformationDescriptor>, - data: T[], - instance?: MappedQueryTransformResult, - state?: QueryTransformState, - ignoreSorts = false, - ignoreNonIncrementalQueries = false -) { - return queriesAndTransformations.reduce((prev, next) => { - if (isTransformation(next)) { - return transformData([ next ], prev); - } - else { - if ((!ignoreSorts || next.queryType !== QueryType.Sort) && (!ignoreNonIncrementalQueries || next.incremental)) { - if (instance && state && isFilter(next)) { - return next - .or(createFilter().custom((item: T) => state.localIndex.has(instance.identify(item)))) - .apply(prev); - } - else { - return next.apply(prev); - } - } - else { - return prev; - } - } - }, data); -} - -/** - * Removes items from adds and updates, and IDs from deletes, that don't belong in this query transform result. The - * observers of this view don't want to see unrelated updates. currentUpdateIndex is used when operating on batch - * updates. If updates are processed in a batch, an item might be added in one, and then removed in a later update. The - * newly added item will not yet be represented in the local data because the update needs to be localized before it - * can be used to update the local data. A single map can be passed as the currentUpdateIndex in multiple calls to - * localizeUpdate, and can then serve as a signal that even though a deleted ID isn't in the local index it is still - * a relevant update - * @param state - * @param update - * @param instance - * @param currentUpdateIndex - * @returns {{deletes: string[], adds: any[], updates: any[], beforeAll: any[], afterAll: any[]}} - */ -function localizeUpdate & QueryTransformMixin>( - state: QueryTransformState, - update: StoreDelta, - instance?: MappedQueryTransformResult, - currentUpdateIndex?: Set -) { - - // Don't apply range queries, sorts, etc. to adds and updates, because those don't make sense in that context - const adds = queryAndTransformData(state.queriesAndTransformations, update.adds, undefined, undefined, true, true); - const updates = queryAndTransformData(state.queriesAndTransformations, update.updates, instance, state, true, true); - if (instance && currentUpdateIndex) { - instance.identify(adds.concat(updates)).map((id) => currentUpdateIndex.add(id)); - } - const deletes = update.deletes.filter((id) => - state.localIndex.has(id) || currentUpdateIndex && currentUpdateIndex.has(id) - ); - // Applying range queries to beforeAll and afterAll may not be completely accurate, in the case that - // we are not eagerly fetching or tracking, but the data would definitely not be accurate if we don't apply them - // and we shouldn't be returning more data than the queries require. 
- const beforeAll = queryAndTransformData(state.queriesAndTransformations, update.beforeAll); - const afterAll = queryAndTransformData(state.queriesAndTransformations, update.afterAll); - - return { - deletes: deletes, - adds: adds, - updates: updates, - beforeAll: beforeAll, - afterAll: afterAll - }; -} - -const instanceStateMap = new WeakMap, QueryTransformState>(); - -export interface QueryTransformResultFactory extends ComposeFactory, QueryTransformState> { - & QueryTransformMixin>(options?: QueryTransformOptions): QueryTransformResult; -} - -export interface MappedQueryTransformResultFactory extends ComposeFactory, QueryTransformState> { - & QueryTransformMixin>(options?: QueryTransformOptions): MappedQueryTransformResult; -} - -export interface TrackedQueryTransformResultFactory extends ComposeFactory, QueryTransformState> { - & QueryTransformMixin>(options?: QueryTransformOptions): TrackedQueryTransformResult; -} - -export const createQueryTransformResult: QueryTransformResultFactory = compose, any>({ - query(this: QueryTransformResult, query: Query) { - const state = instanceStateMap.get(this); - const options: QueryTransformOptions = { - source: state.source, - queriesAndTransformations: [ ...state.queriesAndTransformations, query ], - trackingFetchDelay: state.trackingFetchDelay, - fetchAroundUpdates: state.fetchAroundUpdates - }; - if (isMapped(this)) { - return createMappedQueryTransformResult(options); - } - else { - return createQueryTransformResult(options); - } - }, - filter(this: QueryTransformResult, filterOrTest: Filter | ((item: any) => boolean)) { - let filter: Filter; - if (isFilter(filterOrTest)) { - filter = filterOrTest; - } - else { - filter = createFilter().custom(<(item: any) => boolean> filterOrTest); - } - - return this.query(filter); - }, - - range(this: QueryTransformResult, rangeOrStart: StoreRange | number, count?: number) { - let range: StoreRange; - if (typeof count !== 'undefined') { - range = createRange( rangeOrStart, count); - } - else { - range = > rangeOrStart; - } - - return this.query(range); - }, - - sort(this: QueryTransformResult, sortOrComparator: Sort | ((a: any, b: any) => number), descending?: boolean) { - let sort: Sort; - if (isSort(sortOrComparator)) { - sort = sortOrComparator; - } - else { - sort = createSort(sortOrComparator, descending); - } - - return this.query(sort); - }, - observe(this: QueryTransformResult, idOrIds?: string | string[]) { - const state = instanceStateMap.get(this); - if (!idOrIds) { - if (!state.sourceHandle) { - const waitForFetchPromise: Promise = state.initialFetch || Promise.resolve(); - state.sourceHandle = waitForFetchPromise.then(() => { - return state.source.observe().subscribe((update: StoreDelta) => { - const state = instanceStateMap.get(this); - const mapped = this; - if (isMapped(mapped)) { - if (state.fetchAroundUpdates || !state.isTracking) { - update = localizeUpdate(state, update, mapped); - const newData = update.afterAll; - const newIndex = buildIndex(mapped.identify(newData)); - sendTrackedUpdate(state, mapped, newData, newIndex, update); - state.localData = newData; - state.localIndex = newIndex; - } - else { - // Combine batched updates, use `currentUpdateIndex` to make sure deletes of items added and then deleted within - // the span of the queued updates are not lost. 
These will be cancelled out by mergeDeltas, but both need - // to be there to properly get cancelled out, otherwise the delete gets removed and the add survives, resulting - // in an incorrect update - update = localizeUpdate(state, update, mapped, state.currentUpdateIndex); - state.queuedUpdate = state.queuedUpdate ? - mergeDeltas(mapped, state.queuedUpdate, update) : update; - // Unfortunately if we have a non-incremental query and we are tracking, we will need to fetch - // after each update. This is debounced to avoid rapidly issuing fetch requests in the case that a - // series of updates are received in a short amount of time. - state.fetchAndSendUpdates(mapped); - } - } - else { - update = localizeUpdate(state, update); - sendUpdate(state, update); - } - }); - }); - } - return state.observable; - } - else { - if (Array.isArray(idOrIds)) { - return state.source - .observe(idOrIds) - .map((update: ItemUpdate) => transformItemUpdate(state.queriesAndTransformations, update)); - } - else { - return state.source - .observe(idOrIds) - .map((update: any) => transformData(state.queriesAndTransformations, update)); - } - } - }, - get(this: QueryTransformResult, ids: string | string[]) { - const state = instanceStateMap.get(this); - const promise: Promise = state.initialFetch || Promise.resolve(); - const mapped = isMapped(this); - return promise.then(() => { - if (mapped) { - if (Array.isArray(ids)) { - return ids.map((id) => state.localData[state.localIndex.get(id)!]) - .filter((item) => Boolean(item)); - } - else { - return state.localData[state.localIndex.get(ids)!]; - } - } - else { - return this.source.get(ids).then((data: {} | {}[]) => { - if (Array.isArray(data)) { - return queryAndTransformData(state.queriesAndTransformations, data); - } - else { - return queryAndTransformData(state.queriesAndTransformations, [ data ])[0]; - } - }); - } - }); - }, - transform( - this: QueryTransformResult, - transformation: Patch | ((item: any) => V), - idTransform?: string | ((item: V) => string) - ): any { - const state = instanceStateMap.get(this); - const options: QueryTransformOptions = { - source: state.source, - queriesAndTransformations: [ - ...state.queriesAndTransformations, - { transformation: transformation, idTransform: idTransform } - ], - trackingFetchDelay: state.trackingFetchDelay, - fetchAroundUpdates: state.fetchAroundUpdates - }; - if (idTransform) { - return createMappedQueryTransformResult(options); - } - else { - return createQueryTransformResult(options); - } - }, - fetch(this: QueryTransformResult, query?: Query) { - const state = instanceStateMap.get(this); - - let firstQuery = new CompoundQuery(); - const queriesAndTransformations = state.queriesAndTransformations.slice(); - let nextQuery = queriesAndTransformations.shift(); - // Get the queries that can be passed through to the store. This includes all queries up to and including the - // first non incremental query(e.g. a range query) or up to and not including the first transformation - while (nextQuery && isQuery(nextQuery) && nextQuery.incremental) { - firstQuery = firstQuery.withQuery(nextQuery); - nextQuery = queriesAndTransformations.shift(); - } - if (nextQuery && isQuery(nextQuery)) { - firstQuery = firstQuery.withQuery(nextQuery); - } - else if (nextQuery) { - queriesAndTransformations.unshift(nextQuery); - } - - const mapped: MappedQueryTransformResult | undefined = isMapped(this) ? - this as MappedQueryTransformResult : undefined; - let nextUpdate: StoreDelta = (state.queuedUpdate && mapped) ? 
state.queuedUpdate : { - adds: [], - updates: [], - deletes: [], - beforeAll: [], - afterAll: [] - }; - state.currentUpdateIndex.clear(); - state.queuedUpdate = undefined; - - let resolveTotalLength: Function | undefined = undefined; - let rejectTotalLength: Function | undefined = undefined; - const totalLength = new Promise((resolve, reject) => { - resolveTotalLength = resolve; - rejectTotalLength = reject; - }); - let resolveDataLength: Function; - let rejectDataLength: Function; - const dataLength = new Promise((resolve, reject) => { - resolveDataLength = resolve; - rejectDataLength = reject; - }); - const fetchResult = state.source.fetch(firstQuery); - const resultsPromise = fetchResult.then( - (newData: any[]) => { - // We should apply the query transform result's own queries first so that the total size of the locally - // cached data can be determined - newData = queryAndTransformData(queriesAndTransformations, newData); - resolveDataLength(newData.length); - - if (mapped) { - const ids = mapped.identify(newData); - const newIndex = buildIndex(ids); - // Update this way if this is not an initial fetch. If this is the initial fetch, then this - // data (or subsequent data) will already be provided to observers in the initial notification, so don't - // send a redundant one. - if (resultsPromise !== state.initialFetch) { - nextUpdate.beforeAll = state.localData; - nextUpdate.afterAll = newData; - sendTrackedUpdate(state, mapped, newData, newIndex, nextUpdate); - } - state.localIndex = newIndex; - state.localData = newData; - } - if (query) { - newData = query.apply(newData); - } - return newData; - }, - (error: any) => { - rejectDataLength(error); - throw error; - }); - fetchResult.totalLength.then(resolveTotalLength, rejectTotalLength); - resultsPromise.dataLength = dataLength; - resultsPromise.totalLength = totalLength; - - if (!state.initialFetch) { - state.initialFetch = resultsPromise; - } - - return resultsPromise; - }, - - get source(this: QueryTransformResult) { - return instanceStateMap.get(this).source; - } -}, (instance: QueryTransformResult, options?: QueryTransformOptions) => { - if (!options) { - throw Error('Query Transform result cannot be created without providing a source store'); - } - const observable = new Observable>((observer: Observer>) => { - const state = instanceStateMap.get(instance); - state.observers.push(observer); - if (isMapped(instance)) { - const fetchPromise: Promise = state.initialFetch || Promise.resolve(); - fetchPromise.then(() => { - const addedToTracked: { item: any; id: string; index: number; }[] = []; - state.localIndex.forEach((index, id) => { - addedToTracked.push({ - index: index, - item: state.localData[index], - id: id - }); - }); - const trackedDelta: TrackableStoreDelta = { - updates: [], - deletes: [], - adds: [], - addedToTracked: addedToTracked.slice(), - removedFromTracked: [], - movedInTracked: [], - afterAll: state.localData.slice(), - beforeAll: [] - }; - observer.next(trackedDelta); - }); - } - else { - observer.next({ - updates: [], - adds: [], - deletes: [], - beforeAll: [], - afterAll: state.localData.slice() - }); - } - return () => { - function remove(observer: Observer>) { - state.observers.splice(state.observers.indexOf(observer), 1); - if (!state.observers.length && state.sourceHandle) { - state.sourceHandle.then((subscription) => { - if (!state.observers.length) { - subscription.unsubscribe(); - state.sourceHandle = undefined; - } - - }); - } - } - // Do the actual removal on the next tick so that - // we don't remove 
items from the array while we're iterating through it. - setTimeout(() => { - remove(observer); - }); - }; - }); - - const updateInPlace = canUpdateInPlace(options.queriesAndTransformations, instance); - - const state: QueryTransformState = { - source: options.source, - observers: [], - canUpdateInPlace: updateInPlace, - observable: observable, - localData: [], - localIndex: new Map(), - queriesAndTransformations: options.queriesAndTransformations, - isTracking: options.isTracking, - trackingFetchDelay: options.trackingFetchDelay, - currentUpdateIndex: new Set(), - fetchAndSendUpdates: debounce((instance: QueryTransformResult) => { - instance.fetch(); - }, options.trackingFetchDelay || 20), - fetchAroundUpdates: options.fetchAroundUpdates - }; - instanceStateMap.set(instance, state); - - if (options.isTracking && !options.fetchAroundUpdates) { - instance.fetch(); - } - -}); - -// TODO - Figure out how to get these factory types to work -const createMappedQueryTransformResult: MappedQueryTransformResultFactory = createQueryTransformResult - .mixin({ - mixin: compose({ - track(this: MappedQueryTransformResult): TrackedQueryTransformResult { - const state = instanceStateMap.get(this); - return createTrackedQueryTransformResult({ - isTracking: true, - source: state.source, - trackingFetchDelay: state.trackingFetchDelay, - queriesAndTransformations: state.queriesAndTransformations, - fetchAroundUpdates: state.fetchAroundUpdates - }); - }, - identify(this: QueryTransformResult, items: any[] | any): string | string[] { - const state = instanceStateMap.get(this); - const lastTransformation = state.queriesAndTransformations.reduce | undefined>( - (prev, next) => isTransformation(next) ? next : prev, undefined - ); - const itemArray = Array.isArray(items) ? items : [ items ]; - if (lastTransformation) { - const idTransform = lastTransformation.idTransform!; - if (typeof idTransform === 'string') { - return itemArray.map((item) => item[idTransform]); - } - else { - return itemArray.map(idTransform); - } - } - return state.source.identify(items); - } - }) - }); - -export const createTrackedQueryTransformResult: TrackedQueryTransformResultFactory = createMappedQueryTransformResult - .mixin({ - mixin: compose({ - release(this: QueryTransformResult) { - const state = instanceStateMap.get(this); - return createMappedQueryTransformResult({ - isTracking: false, - source: state.source, - queriesAndTransformations: state.queriesAndTransformations, - fetchAroundUpdates: state.fetchAroundUpdates - }); - } - }) - }); - -export default createMappedQueryTransformResult; diff --git a/src/store/createStore.ts b/src/store/createStore.ts deleted file mode 100644 index e686431..0000000 --- a/src/store/createStore.ts +++ /dev/null @@ -1,238 +0,0 @@ -import { Query } from '../query/interfaces'; -import Promise from '@dojo/shim/Promise'; -import WeakMap from '@dojo/shim/WeakMap'; -import Map from '@dojo/shim/Map'; -import { duplicate } from '@dojo/core/lang'; -import compose, { ComposeFactory } from '@dojo/compose/compose'; -import { Observer, Observable } from '@dojo/core/Observable'; -import Patch, { diff, PatchMapEntry } from '../patch/Patch'; -import _createStoreObservable, { StoreObservable } from './createStoreObservable'; -import InMemoryStorage, { Storage, FetchResult } from '../storage/InMemoryStorage'; - -export const enum StoreOperation { - Add, - Put, - Patch, - Delete -} - -export interface StoreOptions { - data?: T[]; - idProperty?: keyof T; - idFunction?: (item: T) => string; - storage?: Storage; -} - 
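For reviewers, a minimal usage sketch of the class-based StoreBase that replaces this factory. The item shape and the import path below are assumptions for illustration; the constructor options shown above, CrudOptions.rejectOverwrite, and the then-able StoreObservable returned by add() are taken from the code in this patch (the new modules import these interfaces from '../interfaces').

import StoreBase from './store/StoreBase';

interface Task {
	id: string;
	label: string;
}

// StoreOptions: seed data plus the property the storage should use to identify items.
// Type parameters are left to inference here; the exact generic signature may differ.
const tasks: Task[] = [ { id: '1', label: 'first' } ];
const store = new StoreBase({
	data: tasks,
	idProperty: 'id'
});

// CrudOptions.rejectOverwrite guards against replacing an existing item on add.
// The StoreObservable returned by add() is also then-able, so it can be consumed as a promise.
store.add({ id: '2', label: 'second' }, { rejectOverwrite: true })
	.then(() => store.get('2'))
	.then((task) => console.log(task && task.label));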
-export interface CrudOptions { - rejectOverwrite?: boolean; - id?: string; -} - -export type CrudArgument = T | string | PatchMapEntry; - -export interface UpdateResults { - currentItems?: T[]; - failedData?: CrudArgument[]; - successfulData: T[] | string[]; - type: StoreOperation; -} - -export type BasicPatch = { - id: string; - [index: string]: any; -} - -export type PatchArgument = Map> | - { id: string; patch: Patch } | - { id: string; patch: Patch }[] | - BasicPatch | - BasicPatch[]; - -export interface Store> { - get(ids: string[]): Promise; - get(id: string): Promise; - get(ids: string | string[]): Promise; - identify(items: T[]): string[]; - identify(items: T): string; - identify(items: T | T[]): string | string[]; - createId(): Promise; - add(items: T[] | T, options?: O): StoreObservable; - put(items: T[] | T, options?: O): StoreObservable; - patch(updates: PatchArgument, options?: O): StoreObservable; - delete(ids: string[] | string): StoreObservable; - fetch(query?: Query): FetchResult; -} - -export interface StoreFactory extends ComposeFactory, StoreOptions<{}, {}>> { - (options?: StoreOptions): Store>; -} - -interface BaseStoreState { - storage: Storage; - initialAddPromise: Promise; -} - -const instanceStateMap = new WeakMap, BaseStoreState<{}, {}, any>>(); - -function isPatchArray(patches: any[]): patches is { id: string; patch: Patch}[] { - return isPatch(patches[0]); -} - -function isPatch(patchObj: any): patchObj is {id: string; patch: Patch } { - const patch = patchObj && patchObj.patch; - const id = patchObj && patchObj.id; - return typeof id === 'string' && patch && Array.isArray(patch.operations) && typeof patch.apply === 'function' && - typeof patch.toString === 'function'; -} - -function createStoreObservable(storeResultsPromise: Promise>) { - - return _createStoreObservable( - new Observable>(function subscribe(observer: Observer>) { - storeResultsPromise - .then(function(results) { - observer.next(results); - observer.complete(); - }, function(error) { - observer.error(error); - }); - }), - function(results: UpdateResults<{}>) { - return results.successfulData; - } - ); -} - -const createStore: StoreFactory = compose, StoreOptions<{}, {}>>({ - get(this: Store<{}, {}, any>, ids: string[] | string): Promise<{}[] | {}> { - const state = instanceStateMap.get(this); - return state.initialAddPromise.then(function() { - if (Array.isArray(ids)) { - return state.storage.get(ids).then((items) => items.filter((item) => Boolean(item))); - } - else { - return state.storage.get([ids]).then(items => items[0]); - } - }); - }, - - add(this: Store<{}, {}, any>, items: {}[] | {}, options?: CrudOptions) { - const self = this; - const state = instanceStateMap.get(self); - const storeResultsPromise = state.initialAddPromise.then(function() { - return state.storage.add(Array.isArray(items) ? items : [ items ], options); - }); - return createStoreObservable(storeResultsPromise); - }, - - put(this: Store<{}, {}, any>, items: {}[] | {}, options?: CrudOptions) { - const self = this; - const state = instanceStateMap.get(self); - const storeResultsPromise = state.initialAddPromise.then(function() { - return state.storage.put(Array.isArray(items) ? 
items : [ items ], options); - }); - - return createStoreObservable(storeResultsPromise); - }, - - patch(this: Store<{}, {}, any>, updates: PatchArgument<{}>, options?: CrudOptions) { - const self = this; - const state = instanceStateMap.get(self); - let patchEntries: PatchMapEntry<{}, {}>[] = []; - if (Array.isArray(updates)) { - if (isPatchArray(updates)) { - patchEntries = updates; - } - else { - patchEntries = self.identify(updates).map((id, index) => { - return { id: id, patch: diff(updates[index])}; - }); - } - } - else if (updates instanceof Map) { - updates.forEach(function(value, key) { - patchEntries.push({ - id: key, - patch: value - }); - }); - } - else if (isPatch(updates)) { - patchEntries = [ updates ]; - } - else { - const dupe = duplicate(updates); - const idInOptions = (options && options.id); - const id = idInOptions || dupe.id; - if (!idInOptions) { - delete dupe.id; - } - patchEntries = [ { id: id, patch: diff(dupe) }]; - } - - const storeResultsPromise = state.initialAddPromise.then(function() { - return state.storage.patch(patchEntries); - }); - - return createStoreObservable(storeResultsPromise); - }, - - delete(this: Store<{}, {}, any>, ids: string | string[]) { - const self = this; - const state = instanceStateMap.get(self); - const storeResultsPromise = state.initialAddPromise.then(function() { - return state.storage.delete(Array.isArray(ids) ? ids : [ ids ]); - }); - - return createStoreObservable(storeResultsPromise); - }, - - fetch(this: Store<{}, {}, any>, query?: Query<{}>) { - const state = instanceStateMap.get(this); - let resolveTotalLength: (totalLength: number) => void; - let rejectTotalLength: (error: any) => void; - const totalLength = new Promise((resolve, reject) => { - resolveTotalLength = resolve; - rejectTotalLength = reject; - }); - const fetchResult: FetchResult<{}> = state.initialAddPromise.then(function() { - const result = state.storage.fetch(query); - result.totalLength.then(resolveTotalLength, rejectTotalLength); - return result; - }); - fetchResult.totalLength = fetchResult.dataLength = totalLength; - - return fetchResult; - }, - - identify(this: Store<{}, {}, any>, items: {}[] | {}): any { - const storage = instanceStateMap.get(this).storage; - if (Array.isArray(items)) { - return storage.identify(items); - } - else { - return storage.identify([items])[0]; - } - }, - - createId(this: Store<{}, {}, any>) { - return instanceStateMap.get(this).storage.createId(); - } -}, (instance: Store>, options: StoreOptions) => { - options = options || {}; - const data: T[] | undefined = options.data; - options.data = undefined; - const instanceState: BaseStoreState> = { - storage: options.storage || new InMemoryStorage(options), - initialAddPromise: Promise.resolve() - }; - instanceStateMap.set(instance, instanceState); - if (data) { - instanceState.initialAddPromise = instance.add(data).catch((error) => { - console.error(error); - }); - } - -}); - -export default createStore; diff --git a/src/store/createStoreObservable.ts b/src/store/createStoreObservable.ts index d0217d8..602d032 100644 --- a/src/store/createStoreObservable.ts +++ b/src/store/createStoreObservable.ts @@ -1,17 +1,11 @@ import Promise from '@dojo/shim/Promise'; import { Thenable } from '@dojo/interfaces/shim'; -import { Subscribable, Observable } from '@dojo/core/Observable'; +import { Observable } from '@dojo/core/Observable'; import global from '@dojo/core/global'; +import { StoreObservable } from '../interfaces'; global.Rx = { config: { Promise } }; -/** - * Adds a then method to 
the observable for those consumers of the store API who - * only want to know about the end result of an operation, and don't want to deal with - * any recoverable failures. - */ -export type StoreObservable = Subscribable & Promise - export default function createStoreObservable(observable: Observable, transform: (data: U) => T[]): StoreObservable { // Cast to any because the signatures of catch between the Observable and Promise interfaces are not // compatible diff --git a/src/store/materialize.ts b/src/store/materialize.ts index b4f932f..06ef5f5 100644 --- a/src/store/materialize.ts +++ b/src/store/materialize.ts @@ -1,21 +1,21 @@ import { Handle } from '@dojo/interfaces/core'; -import { MappedQueryTransformResult, QueryTransformResult, TrackableStoreDelta } from './createQueryTransformResult'; -import { StoreDelta } from './mixins/createObservableStoreMixin'; -import { Store } from './createStore'; +import { MappedQueryResultInterface, QueryResultInterface, TrackableStoreDelta } from './QueryResult'; +import { StoreDelta } from './ObservableStore'; +import { Store } from '../interfaces'; -export interface Materialization, T extends Store> { +export interface Materialization, T extends Store> { source: S; target: T; apply?(target: T, update: StoreDelta, source: S): void; } export interface MappedMaterialization< - I, S extends MappedQueryTransformResult, T extends Store + I, S extends MappedQueryResultInterface, T extends Store > extends Materialization { apply?(target: T, update: TrackableStoreDelta, source: S): void; } -export default function materialize, T extends Store>( +export default function materialize, T extends Store>( { source, target, apply }: Materialization ): Handle { let initialUpdate = true; diff --git a/src/store/mixins/createObservableStoreMixin.ts b/src/store/mixins/createObservableStoreMixin.ts deleted file mode 100644 index ec1efae..0000000 --- a/src/store/mixins/createObservableStoreMixin.ts +++ /dev/null @@ -1,757 +0,0 @@ -import createStore, { CrudOptions, Store, StoreOptions, UpdateResults } from '../createStore'; -import { Observable, Observer } from '@dojo/core/Observable'; -import WeakMap from '@dojo/shim/WeakMap'; -import Map from '@dojo/shim/Map'; -import Set from '@dojo/shim/Set'; -import Promise from '@dojo/shim/Promise'; -import { StoreObservable } from '../createStoreObservable'; -import { ComposeFactory, ComposeMixinDescriptor } from '@dojo/compose/compose'; -import { after } from '@dojo/compose/aspect'; -import { Query } from '../../query/interfaces'; -import { debounce } from '@dojo/core/util'; - -export interface StoreDelta { - /** - * Items updated since the last delta - */ - updates: T[]; - /** - * The IDs of any deleted items - */ - deletes: string[]; - /** - * New items added since the last delta - */ - adds: T[]; - /** - * The state of the store before any of these updates. - */ - beforeAll: T[]; - /** - * The state of the store after all of these updates. Doesn't necessarily - * reflect the current state of the underlying Storage, as it updates the local - * storage based on the known updates if fetchAroundUpdates is false - */ - afterAll: T[]; -} - -/** - * Combines several sequential deltas into a single delta. - * It performs several checks to remove redundant data. - * - Checks for repeated copies items with the same ID in - * adds and updates, or just the same ID in deletes, and keeps - * only the last. 
- * - Checks for deletes followed by adds or updates and replaces with a - * single update - * - Checks for adds followed by deletes and removes both - * - Checks for updates followed by deletes and removes the update - * @param instance The instance that can identify these items - * @param currentUpdate The current store delta - * @param newUpdate The new update to merge - * @returns The merged delta - */ -export function mergeDeltas( - instance: { identify(items: T | T[]): string[] }, - currentUpdate: StoreDelta, - newUpdate: StoreDelta -): StoreDelta { - /** - * Takes the last instance of an item repeated in the list - * @param items Added or updated items - * @returns The added or updated items with repeated items replaced by only the latest version of the item - */ - function takeLastItem(items: T[]): T[] { - const found: { [ index: string ]: boolean} = {}; - const ids = instance.identify(items); - return items.reverse().filter((_, index) => { - const id = ids[index]; - const exists = Boolean(found[id]); - found[id] = true; - return !exists; - }).reverse(); - } - - /** - * Takes the last instance of an id repeated in the list - * @param ids IDs of deleted items - * @returns The list with duplicates removed - */ - function takeLastId(ids: string[]): string[] { - const found: { [ index: string ]: boolean} = {}; - return ids.reverse().filter((id) => { - const exists = Boolean(found[id]); - found[id] = true; - return !exists; - }).reverse(); - } - - /** - * Removes updates for items that were later deleted - * @param newDeletes Deletes from delta(s) after the updates - * @param oldUpdates Updates from delta(s) before the deletes - * @return The updates without updates for subsequently deleted items - */ - function removeOutdatedItems(newDeletes: string[], oldUpdates: T[]) { - const deletedIds = newDeletes.reduce((prev, next) => { - prev.set(next, null); - return prev; - }, new Map()); - const ids = instance.identify(oldUpdates); - return oldUpdates.filter((_, index) => { - return !deletedIds.has(ids[index]); - }); - } - - /** - * Finds cases where an older update has an add, and a newer update has a delete, and removes - * both, since the net effect is that the operations are cancelled out - * @param newDeletes Deletes form delta(s) after the adds - * @param oldAdds Adds from delta(s) before the deletes - * @returns An object with the filtered adds and deletes - */ - function removeCancellingUpdates(newDeletes: string[], oldAdds: T[]) { - const deletedIds = newDeletes.reduce((prev, next) => { - prev.set(next, null); - return prev; - }, new Map()); - const ids = instance.identify(oldAdds); - const addIds = ids.reduce((prev, next) => { - prev.set(next, null); - return prev; - }, new Map()); - return { - oldAdds: oldAdds.filter((_, index) => { - return !deletedIds.has(ids[index]); - }), - newDeletes: newDeletes.filter((id) => !addIds.has(id)) - }; - } - - /** - * Finds places where an item was deleted and then added or updated, and removes the delete. 
If the item was added, - * the add is also replaced with an update since it should already exist in the collection receiving the updates, - * as it will never receive the delete - * @param oldDeletes - Deletes from delta(s) before the adds and updates - * @param newAdds - Adds from delta(s) after the deletes - * @param newUpdates - Updates from delta(s) after the deletes - * @returns An object containing the updated deletes, adds, and updates - */ - function convertReplacementToUpdate(oldDeletes: string[], newAdds: T[], newUpdates: T[]) { - const deletes = oldDeletes.reduce((prev, next) => { - prev.set(next, null); - return prev; - }, new Map()); - const addIds = instance.identify(newAdds); - const updateIds = instance.identify(newUpdates); - const adds = addIds.concat(updateIds).reduce((prev, next) => { - prev.set(next, null); - return prev; - }, new Map()); - const updatedUpdates = newUpdates.slice(); - return { - oldDeletes: oldDeletes.filter((id) => !adds.has(id)), - newAdds: newAdds.filter((item, index) => { - const shouldKeep = !deletes.has(addIds[index]); - if (!shouldKeep) { - // Always add it to the beginning, because it may have been updated as well, but the add - // has to have come first. - updatedUpdates.unshift(item); - } - return shouldKeep; - }), - newUpdates: updatedUpdates - }; - } - - const { oldDeletes, newAdds, newUpdates } = convertReplacementToUpdate( - currentUpdate.deletes, newUpdate.adds, newUpdate.updates - ); - const oldUpdates = removeOutdatedItems(newUpdate.deletes, currentUpdate.updates); - const { newDeletes, oldAdds } = removeCancellingUpdates(newUpdate.deletes, currentUpdate.adds); - return { - updates: takeLastItem([ ...oldUpdates, ...newUpdates ]), - adds: takeLastItem([ ...oldAdds, ...newAdds ]), - deletes: takeLastId([ ...oldDeletes, ...newDeletes ]), - beforeAll: currentUpdate.beforeAll, - afterAll: newUpdate.afterAll - }; -} - -/** - * An update for a single item, used to identify which item an update is for when multiple items are observed - * simultaneously. Deletes are indicated by the item property being undefined. - */ -export interface ItemUpdate { - item?: T; - id: string; -} - -export interface ObservableStoreMixin { - /** - * Observe the entire store, receiving deltas indicating the changes to the store. - * When observing, an initial update will be sent with the last known state of the store in the `afterAll` property. - * If fetchAroundUpdates is true, the store's local data will be synchronized with the underlying Storage. - * If fetchAroundUpdates is not true, then the data will be the result of locally applying updates to the data - * retrieved from the last fetch. - */ - observe(): Observable>; - /** - * Receives the current state of the item with the specified ID whenever it is updated. This observable will be - * completed if the item is deleted - * @param id The ID of the item to observe - */ - observe(id: string): Observable; - /** - * Receives the current state of the items in an `ItemUpdate` object whenever they are updated. When any of the - * items are deleted an `ItemUpdate` with the item's ID and no item property will be sent out. When all of the - * observed items are deleted the observable will be completed. - * @param ids - The IDs of the items to observe - */ - observe(ids: string[]): Observable>; -} - -export interface ObservableStoreMixinOptions { - /** - * If true, then the local collection will automatically fetch to get the latest data from the store whenever - * an update is made.
- */ - fetchAroundUpdates?: boolean; - /** - * Specifies how long the fetch around updates should be debounced to avoid rapidly fetching when many updates - * are made within close proximity. Defaults to 200 milliseconds - */ - fetchAroundUpdateDebounce?: number; -} - -export type ObserverSetEntry = { observes: Set; observer: Observer> }; - -export interface ObservableStoreState { - fetchAroundUpdates: boolean; - /** - * A debounced function called to fetch the latest data and send updates to observers after each crud operation, - * if fetchAroundUpdates is true. - */ - fetchAndSendUpdates: (store: ObservableStore) => void; - /** - * Maps item IDs to observers for that item, or sets of observers. For Single item observers this is a one-to-many - * relationship. For `ObserverSetEntries`, this is a many to many relationship, each item can be observed as a part - * of many sets, and each set is linked to all of the items within it. - */ - itemObservers: Map | ObserverSetEntry)[]>; - /** - * All the observers of the store - */ - observers: Observer>[]; - /** - * The single observable provided to all observers of the store - */ - storeObservable: Observable>; - /** - * Updates currently waiting to be merged and sent - */ - queuedUpdate?: StoreDelta; - /** - * The latest local data - */ - localData: T[]; - /** - * Maps item IDs to indices in `localData` - */ - localIndex: Map; - /** - * When `fetchAroundUpdates` is true, this promise is used to wait for the first fetch before sending out initial - * updates, since `localData` will be out of date as soon as the fetch completes. - */ - initialFetch?: Promise; -} - -export interface ObservableStore> extends - ObservableStoreMixin, Store {} - -export type ObservableStoreOptions = ObservableStoreMixinOptions & StoreOptions; - -export interface ObservableStoreFactory extends ComposeFactory, ObservableStoreOptions<{}, {}>> { - (options?: ObservableStoreOptions): ObservableStore>; -} - -const instanceStateMap = new WeakMap, ObservableStoreState>(); - -/** - * Takes a collection of items and creates a new copy modified according to the provided updates. This can be used to - * attempt to track updates in the local collection when fetching after each update is disabled. - * @param store - * @param state - * @param data - * @param update - * @returns A new collection with the modifications specified by the update - */ -function addUpdateDelete>( - store: ObservableStore, - state: ObservableStoreState, - data: T[], - update: StoreDelta -) { - const newData = data.slice(); - update.adds.forEach((item) => { - newData.push(item); - }); - - store.identify(update.updates).forEach((id, index) => { - if (state.localIndex.has(id)) { - newData[state.localIndex.get(id)!] = update.updates[index]; - } - else { - newData.push(update.updates[index]); - } - }); - - update.deletes.sort().reverse().forEach((id) => { - if (state.localIndex.has(id)) { - newData.splice(state.localIndex.get(id)!, 1); - } - }); - - return newData; -} - -/** - * Build a map of ids to indices for the provided collection. This requires that the array of IDs is either what - * the index if for, or that the array of items the IDs represent is in the same order, which is already the case - * if the IDs were generated using the Store's identify function. 
- * @param ids - The IDS to build the index for - * @returns An index mapping ids to indices - */ -export function buildIndex(ids: string[]): Map { - return ids.reduce((map, id, index) => { - map.set(id, index); - return map; - }, new Map()); -} - -/** - * Merges the latest queued updates, updates the local data and index based on the latest data, - * sends out updates to observers, and then removes observers that unsubscribed during the update process from the list - * of observers. If after is provided, it is assumed that that is the latest data for the store, if it is not provided - * the local data is updated according to the merged delta and that is used as the new local data. - * @param store - * @param after - Optional array of items containing the latest data for the store. - */ -function sendUpdates>( - store: ObservableStore, - after?: T[] -) { - const state = instanceStateMap.get(store); - const storeDelta = state.queuedUpdate || { - updates: [], - adds: [], - deletes: [], - beforeAll: [], - afterAll: [] - }; - state.queuedUpdate = undefined; - after = after || addUpdateDelete(store, state, state.localData, storeDelta); - - storeDelta.beforeAll = state.localData; - storeDelta.afterAll = after; - state.localData = after; - state.localIndex = buildIndex(store.identify(after)); - - state.observers.forEach(function(observer) { - observer.next({ - updates: storeDelta.updates.slice(), - adds: storeDelta.adds.slice(), - deletes: storeDelta.deletes.slice(), - beforeAll: storeDelta.beforeAll.slice(), - afterAll: storeDelta.afterAll.slice() - }); - }); -} - -/** - * Determines whether this is a single observer or a set entry - * @param observer - * @returns {boolean} - */ -function isObserverEntry(observer: Observer | ObserverSetEntry): observer is ObserverSetEntry { - return ( observer).observes instanceof Set; -} - -/** - * Determines whether this is a single observer or a set entry - * @param observer - * @returns {boolean} - */ -function isObserver(observer: Observer | ObserverSetEntry): observer is Observer { - return !isObserverEntry(observer); -} - -/** - * Iterates through the provided items and/or IDs and notifies observers. If items is provided, then the - * observers for that item, and the observers for sets of items that include that are updated. If items is null, then - * these are delete notifications for observers of multiple items. 
In this case, no update is sent to individual - * observers, and observers of sets receive `ItemUpdate` objects with the IDs of the deleted items and an undefined item - * - * @param items Items to send updates for, or null if these are delete notifications for item set observers - * @param ids - IDs of the items, should be in the same order as items - * @param state - * @param store - */ -function notifyItemObservers>( - items: T[] | null, - ids: string[], - state: ObservableStoreState, - store: ObservableStore -) { - function notify(id: string, after?: T) { - if (state.itemObservers.has(id)) { - state.itemObservers.get(id)!.map(function(observerOrEntry): Observer> | null { - if (isObserverEntry(observerOrEntry)) { - return observerOrEntry.observer; - } - else { - return null; - } - }).filter(function(observerEntry) { - return observerEntry; - }).forEach(function(observer: Observer>) { - observer.next({ - item: after, - id: id - }); - }); - if (after) { - state.itemObservers.get(id)!.map(function(observerOrEntry): Observer | null { - if (isObserver(observerOrEntry)) { - return observerOrEntry; - } - else { - return null; - } - }).filter(function(observer) { - return observer; - }).forEach(function(observer: Observer) { - observer.next(after); - }); - } - } - } - if (items) { - items.forEach(function(after: T, index: number) { - const id = ids[index] || store.identify(after); - notify(id, after); - }); - } - else { - ids.forEach(function(id) { - notify(id, undefined); - }); - } -} - -/** - * Queues the appropriate update and then either starts up a fetch or just triggers sending the updates depending - * on the `fetchAroundUpdates` property - * @param state - * @param store - * @param updates Updated items - * @param adds Added items - * @param deletes Deleted IDs - */ -function sendUpdatesOrFetch>( - state: ObservableStoreState, - store: ObservableStore, - updates: T[], - adds: T[], - deletes: string[] -) { - const newUpdate = { - updates: updates, - adds: adds, - deletes: deletes, - beforeAll: [], - afterAll: [] - }; - state.queuedUpdate = state.queuedUpdate ? 
mergeDeltas(store, state.queuedUpdate, newUpdate) : newUpdate; - if (state.fetchAroundUpdates) { - state.fetchAndSendUpdates(store); - } - else { - sendUpdates(store); - } -} -function createObservableStoreMixin>(): ComposeMixinDescriptor< - Store, - CrudOptions, - ObservableStoreMixin, - ObservableStoreMixinOptions -> { - return { - mixin: { - observe(this: ObservableStore, idOrIds?: string | string[]): any { - if (idOrIds) { - const self = > this; - const state = instanceStateMap.get(self); - if (Array.isArray(idOrIds)) { - const ids = idOrIds; - - const idSet = new Set(ids); - const observable = new Observable>(function subscribe(observer: Observer>) { - const observerEntry: ObserverSetEntry = { - observes: idSet, - observer: observer - }; - ids.forEach(function(id: string) { - if (state.itemObservers.has(id)) { - state.itemObservers.get(id)!.push(observerEntry); - } - else { - state.itemObservers.set(id, [observerEntry]); - } - }); - const foundIds = new Set(); - observer.next = after(observer.next, (result: any, itemUpdate: ItemUpdate) => { - foundIds.add(itemUpdate.id); - return result; - }); - - self.get(ids).then(function(items: T[]) { - if (foundIds.size !== ids.length) { - const retrievedIdSet = new Set(self.identify(items)); - let missingItemIds = ids.filter(id => !retrievedIdSet.has(id)); - - if (retrievedIdSet.size !== idSet.size || missingItemIds.length) { - observer.error(new Error(`ID(s) "${missingItemIds}" not found in store`)); - } - else { - items.forEach((item, index) => observer.next({ - item: item, - id: ids[index] - })); - } - } - }); - }); - return observable; - } - else { - const id = idOrIds; - return new Observable(function subscribe(observer: Observer) { - self.get(id).then(function(item: T) { - if (!item) { - observer.error(new Error(`ID "${id}" not found in store`)); - } - else { - if (state.itemObservers.has(id)) { - state.itemObservers.get(id)!.push(observer); - } - else { - state.itemObservers.set(id, [ observer ]); - } - observer.next(item); - } - }); - }); - } - } - else { - return instanceStateMap.get(this).storeObservable; - } - } - }, - aspectAdvice: { - after: { - /** - * After fetching, sends updates if no query was used. If a custom query was used then the data retrieved - * is not indicative of the local data and can't be used. We shouldn't apply the query locally because we - * have no knowledge of the underlying storage implementation or the amount of data and it may be too much - * data to retrieve or update in memory. If this is the initialFetch, don't update since that update - * will be sent to each subscriber at the time of subscription. If we're not sending updates, still set - * the local data and index to the newly retrieved data. - * @param result - * @param query - * @returns {Promise} - */ - fetch(this: ObservableStore, result: Promise, query?: Query) { - if (!query) { - result.then( - (data) => { - const state = instanceStateMap.get(this); - if (result !== state.initialFetch) { - sendUpdates(this, data); - } - else { - state.localData = data; - state.localIndex = buildIndex(this.identify(data)); - } - }, - // Ignore errors here, they should be handled by the caller not observers - () => {} - ); - } - return result; - }, - - /** - * After the put is completed, notify the item observers, and then either queue a fetch to send updates - * if fetchAroundUpdates is true, or just send updates if not. 
- * @param result - * @returns {StoreObservable} - */ - put(this: ObservableStore, result: StoreObservable) { - result.then( - (updatedItems: T[]) => { - const state = instanceStateMap.get(this); - notifyItemObservers(updatedItems, [], state, this); - sendUpdatesOrFetch(state, this, updatedItems, [], []); - }, - // Ignore errors here, they should be handled by the caller not observers - () => {} - ); - return result; - }, - - /** - * After the patch is completed, notify the item observers, and then either queue a fetch to send updates - * if fetchAroundUpdates is true, or just send updates if not. - * @param result - * @returns {StoreObservable} - */ - patch(this: ObservableStore, result: StoreObservable) { - result.then( - (updatedItems: T[]) => { - const state = instanceStateMap.get(this); - notifyItemObservers(updatedItems, [], state, this); - sendUpdatesOrFetch(state, this, updatedItems, [], []); - }, - // Ignore errors here, they should be handled by the caller not observers - () => {} - ); - return result; - }, - - /** - * After the add is completed notify observers. If this is the initial add AND we are fetching around - * updates, then the first update to subscribers will already contain this data, since the initial fetch - * is performed after the initial add. In this case we do not need to send an update. We can tell this - * is the first add because it'll be triggered in the createStore base before the state is created for - * this instance in the mixin's initializer - * @param result - * @returns {StoreObservable} - */ - add(this: ObservableStore, result: StoreObservable) { - const isFirstAdd = !instanceStateMap.get(this); - - result.then( - (addedItems: T[]) => { - const state = instanceStateMap.get(this); - if (!isFirstAdd || !state.fetchAroundUpdates) { - sendUpdatesOrFetch(state, this, [], addedItems, []); - } - }, - // Ignore errors here, they should be handled by the caller not observers - () => {} - ); - return result; - }, - - /** - * After the items are deleted, notify item set observers of the deletion of one of the items they are - * observing, and then complete any observables that need to be completed. 
- * Completing observables is dones as follows - * - For observers of a single item, just complete the observer - * - For observers of a set of items - * - Remove the deleted ID of this item from the set of observed IDs - * - If there are now no observed IDs for the set, complete the observable - * - Remove the item observer entry for the deleted ID - * @param result - * @param ids - * @returns {StoreObservable} - */ - delete(this: ObservableStore, result: StoreObservable, ids: string | string[]) { - result.then( - (deleted: string[]) => { - const state = instanceStateMap.get(this); - notifyItemObservers(null, deleted, state, this); - deleted.forEach(function(id: string) { - if (state.itemObservers.has(id)) { - state.itemObservers.get(id)!.forEach(function(observerOrEntry) { - if (isObserver(observerOrEntry)) { - observerOrEntry.complete(); - } - else { - observerOrEntry.observes.delete(id); - if (!observerOrEntry.observes.size) { - observerOrEntry.observer.complete(); - } - } - }); - state.itemObservers.delete(id); - } - }); - sendUpdatesOrFetch(state, this, [], [], deleted); - }, - // Ignore errors here, they should be handled by the caller not observers - () => {} - ); - return result; - } - } - }, - initialize>(instance: ObservableStore, options?: ObservableStoreOptions) { - options = options || {}; - const itemObservers = new Map | ObserverSetEntry)[]>(); - const storeObservable = new Observable>(function(this: ObservableStoreMixin, observer: Observer>) { - const state = instanceStateMap.get(this); - state.observers.push(observer); - if (state.initialFetch) { - state.initialFetch.then(() => { - observer.next({ - updates: [], - deletes: [], - adds: [], - beforeAll: [], - afterAll: state.localData.slice() - }); - }); - } - else { - observer.next({ - updates: [], - deletes: [], - adds: [], - beforeAll: [], - afterAll: state.localData.slice() - }); - } - return () => { - function remove(observer: Observer>) { - state.observers.splice(state.observers.indexOf(observer), 1); - } - setTimeout(() => { - remove(observer); - }); - }; - }.bind(instance)); - const state: ObservableStoreState = { - fetchAroundUpdates: Boolean(options.fetchAroundUpdates), - fetchAndSendUpdates: debounce((store: ObservableStore) => { - store.fetch(); - }, options.fetchAroundUpdateDebounce || 20), - itemObservers: itemObservers, - observers: [], - storeObservable: storeObservable, - localData: [], - localIndex: new Map() - }; - if (options.fetchAroundUpdates) { - state.initialFetch = instance.fetch(); - } - - instanceStateMap.set(instance, state); - } - }; -} -export default createObservableStoreMixin; - -export const createObservableStore: ObservableStoreFactory = createStore - .mixin(createObservableStoreMixin()); diff --git a/src/store/mixins/createQueryTransformMixin.ts b/src/store/mixins/createQueryTransformMixin.ts deleted file mode 100644 index 1f960b1..0000000 --- a/src/store/mixins/createQueryTransformMixin.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { Query, QueryType } from '../../query/interfaces'; -import createFilter, { Filter } from '../../query/createFilter'; -import createRange, { StoreRange } from '../../query/createStoreRange'; -import { StoreOptions, CrudOptions, UpdateResults } from '../createStore'; -import createSort, { Sort } from '../../query/createSort'; -import { ComposeMixinDescriptor, ComposeFactory } from '@dojo/compose/compose'; -import { ObservableStore, createObservableStore, ObservableStoreOptions } from './createObservableStoreMixin'; -import Patch from '../../patch/Patch'; -import 
createMappedQueryTransformResult, { QueryTransformResult, MappedQueryTransformResult, createQueryTransformResult } from '../createQueryTransformResult'; -import WeakMap from '@dojo/shim/WeakMap'; - -export interface QueryTransformMixin> { - /** - * Creates a query transform result with the provided query - * @param query - */ - query(query: Query): MappedQueryTransformResult>; - /** - * Creates a query transform result with the provided filter - * @param filter - */ - filter(filter: Filter): MappedQueryTransformResult>; - /** - * Creates a query transform result with a filter built from the provided test - * @param test - */ - filter(test: (item: T) => boolean): MappedQueryTransformResult>; - /** - * Creates a query transform result with the provided range - * @param range - */ - range(range: StoreRange): MappedQueryTransformResult>; - /** - * Creates a query transform result with a range built based on the provided start and count - * @param start - * @param count - */ - range(start: number, count: number): MappedQueryTransformResult>; - /** - * Creates a query transform result with the provided sort or a sort built from the provided comparator or a - * comparator for the specified property - * @param sort - * @param descending - */ - sort(sort: Sort | ((a: T, b: T) => number) | string, descending?: boolean): MappedQueryTransformResult>; - /** - * Create a query transform result that cannot be tracked, and cannot send tracked updates. This is the case because - * the resulting query transform result will have no way to identify items, making it impossible to determine - * whether their position has shifted or to differentiate between updates and adds - * @param transformation - */ - transform(transformation: Patch | ((item: T) => V)): QueryTransformResult>; - /** - * Create a trackable query transform result with the specified transformation - * @param transformation - * @param idTransform - */ - transform(transformation: Patch | ((item: T) => V), idTransform: string | ((item: V) => string)): MappedQueryTransformResult>; -} - -export interface QueryTransformState { - /** - * We have to track whether we're fetching around updates because the query transform results need to know this in - * order to determine whether they can fully trust the data provided to them from the source store's observable - */ - fetchAroundUpdates: boolean; -} -/** - * Check if this is a filter query or just a test function - * @param filterOrTest - * @returns {boolean} - */ -export function isFilter(filterOrTest: Query | ((item: T) => boolean)): filterOrTest is Filter { - return typeof filterOrTest !== 'function' && (> filterOrTest).queryType === QueryType.Filter; -} - -/** - * Check if this is a sort query or just a comparator - * @param sortOrComparator - * @returns {boolean} - */ -export function isSort(sortOrComparator: Sort | ((a: T, b: T) => number) | string): sortOrComparator is Sort { - const paramType = typeof sortOrComparator; - return paramType !== 'function' && paramType !== 'string' && typeof (> sortOrComparator).apply === 'function'; -} - -export type QueryStore> = QueryTransformMixin & S; - -export interface QueryStoreFactory extends ComposeFactory, StoreOptions> { - >(options?: ObservableStoreOptions): QueryStore; -} - -export interface SimpleQueryStoreFactory extends ComposeFactory, StoreOptions> { - (options?: ObservableStoreOptions): QueryStore>>; -} - -const instanceStateMap = new WeakMap(); - -function createQueryTransformMixin>(): ComposeMixinDescriptor< - ObservableStore, - StoreOptions, -
QueryTransformMixin, - StoreOptions -> { - const queryMixin: QueryTransformMixin = { - query(this: QueryStore, query: Query) { - return createMappedQueryTransformResult>({ - source: this, - queriesAndTransformations: [ query ], - fetchAroundUpdates: instanceStateMap.get(this).fetchAroundUpdates - }); - }, - - filter(this: QueryStore, filterOrTest: Filter | ((item: T) => boolean)) { - let filter: Filter; - if (isFilter(filterOrTest)) { - filter = filterOrTest; - } - else { - filter = createFilter().custom(<(item: T) => boolean> filterOrTest); - } - - return this.query(filter); - }, - - range(this: QueryStore, rangeOrStart: StoreRange | number, count?: number) { - let range: StoreRange; - if (typeof count !== 'undefined') { - range = createRange( rangeOrStart, count); - } - else { - range = > rangeOrStart; - } - - return this.query(range); - }, - - sort(this: QueryStore, sortOrComparator: Sort | ((a: T, b: T) => number), descending?: boolean) { - let sort: Sort; - if (isSort(sortOrComparator)) { - sort = sortOrComparator; - } - else { - sort = createSort(sortOrComparator, descending); - } - - return this.query(sort); - }, - - transform(this: QueryStore, transformation: Patch | ((item: T) => V), idTransform?: string | ((item: V) => string)): any { - const options = { - source: this, - queriesAndTransformations: [ { transformation: transformation, idTransform: idTransform} ], - fetchAroundUpdates: instanceStateMap.get(this).fetchAroundUpdates - }; - if (idTransform) { - return createMappedQueryTransformResult>(options); - } - else { - return createQueryTransformResult>(options); - } - } - }; - return { - mixin: queryMixin, - initialize(instance: QueryStore, options?: { fetchAroundUpdates?: boolean}) { - instanceStateMap.set(instance, { - fetchAroundUpdates: (options && options.fetchAroundUpdates) || false - }); - } - }; -} - -export default createQueryTransformMixin; - -export const createQueryStore: SimpleQueryStoreFactory = createObservableStore - .mixin(createQueryTransformMixin()); diff --git a/src/store/mixins/createTransactionMixin.ts b/src/store/mixins/createTransactionMixin.ts deleted file mode 100644 index 2dbdd2b..0000000 --- a/src/store/mixins/createTransactionMixin.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { Store, CrudOptions, UpdateResults } from '../createStore'; -import createStoreObservable, { StoreObservable } from '../createStoreObservable'; -import Patch from '../../patch/Patch'; -import Map from '@dojo/shim/Map'; -import WeakMap from '@dojo/shim/WeakMap'; -import { Observable } from '@dojo/core/Observable'; -import compose, { ComposeFactory } from '@dojo/compose/compose'; - -export interface TransactionMixin, C extends Store> { - transaction(): Transaction; -} - -export type TransactionStore, C extends Store> = TransactionMixin & C; - -export interface Transaction, C extends Store> { - abort(): TransactionStore; - commit(): StoreObservable; - add(items: T[] | T, options?: O): Transaction; - put(items: T[] | T, options?: O): Transaction; - patch(updates: Map> | { id: string; patch: Patch } | { id: string; patch: Patch }[], options?: O): Transaction; - delete(ids: string[] | string): Transaction; -} - -interface TransactionOptions, C extends Store> { - store?: C; -} - -interface TransactionState, C extends Store> { - store: C; - actions: Array<() => StoreObservable>; -} - -const instanceStateMap = new WeakMap, Store<{}, {}, UpdateResults<{}>>>, TransactionState<{}, {}, UpdateResults<{}>, any>>(); -function createTransactionMixin, C extends Store>() { - const 
createTransaction: ComposeFactory, TransactionOptions> = - compose, TransactionOptions>({ - put(this: Transaction, items: T[] | T, options?: O) { - const state = instanceStateMap.get(this); - state.actions.push(() => { - return state.store.put(items, options); - }); - return this; - }, - - patch(this: Transaction, updates: Map>, options?: O) { - const state = instanceStateMap.get(this); - state.actions.push(() => { - return state.store.patch(updates); - }); - return this; - }, - - add(this: Transaction, items: T[]| T, options?: O) { - const state = instanceStateMap.get(this); - state.actions.push(() => { - return state.store.add(items, options); - }); - return this; - }, - - delete(this: Transaction, ids: string[] | string) { - const state = instanceStateMap.get(this); - state.actions.push(() => { - return state.store.delete(ids); - }); - return this; - }, - - commit(this: Transaction) { - const state = instanceStateMap.get(this); - return createStoreObservable( - Observable.from(state.actions.map( - function(action: () => StoreObservable) { - return Observable.defer(action); - })).mergeAll(1).toArray(), - function(updateResultsList) { - const data: (T | string)[] = []; - return updateResultsList.reduce(function(prev, next) { - return prev.concat(next.successfulData); - }, data); - }); - }, - - abort(this: Transaction) { - const state = instanceStateMap.get(this); - state.actions = []; - return state.store; - } - - }, function (instance: Transaction, options: TransactionOptions) { - instanceStateMap.set(instance, { - store: options.store, - actions: [] - }); - }); - - const transactionMixin = compose, any>({ - transaction(this: TransactionStore) { - return createTransaction( { - store: this - } ); - } - }); - - return transactionMixin; -} - -export default createTransactionMixin; diff --git a/tests/unit/all.ts b/tests/unit/all.ts index 768277e..8c9a75e 100644 --- a/tests/unit/all.ts +++ b/tests/unit/all.ts @@ -7,10 +7,9 @@ import './query/createStoreRange'; import './query/CompoundQuery'; import './storage/InMemoryStorage'; import './storage/IndexedDBStorage'; -import './store/createStore'; -import './store/mixins/createObservableStoreMixin'; -import './store/mixins/createTransactionMixin'; -import './store/mixins/createQueryTransformMixin/querying'; -import './store/mixins/createQueryTransformMixin/tracking'; -import './store/mixins/createQueryTransformMixin/transforming'; +import './store/StoreBase'; +import './store/ObservableStore'; +import './store/QueryableStore/querying'; +import './store/QueryableStore/tracking'; +import './store/QueryableStore/transforming'; import './store/materialize'; diff --git a/tests/unit/query/CompoundQuery.ts b/tests/unit/query/CompoundQuery.ts index b500aef..c9f7343 100644 --- a/tests/unit/query/CompoundQuery.ts +++ b/tests/unit/query/CompoundQuery.ts @@ -1,7 +1,7 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; import CompoundQuery from '../../../src/query/CompoundQuery'; -import { QueryType } from '../../../src/query/interfaces'; +import { QueryType } from '../../../src/interfaces'; import createSort from '../../../src/query/createSort'; import createRange from '../../../src/query/createStoreRange'; import { createData, ItemType } from '../support/createData'; diff --git a/tests/unit/storage/InMemoryStorage.ts b/tests/unit/storage/InMemoryStorage.ts index 60db202..00dac0f 100644 --- a/tests/unit/storage/InMemoryStorage.ts +++ b/tests/unit/storage/InMemoryStorage.ts @@ -3,7 +3,7 @@ import * as assert from 
'intern/chai!assert'; import Set from '@dojo/shim/Set'; import InMemoryStorage from '../../../src/storage/InMemoryStorage'; import Promise from '@dojo/shim/Promise'; -import { StoreOperation } from '../../../src/store/createStore'; +import { StoreOperation } from '../../../src/interfaces'; import createFilter from '../../../src/query/createFilter'; import createSort from '../../../src/query/createSort'; import createRange from '../../../src/query/createStoreRange'; @@ -18,7 +18,7 @@ function getStorageAndDfd(test: any, option = {}) { } registerSuite({ - name: 'createInMemoryStorage', + name: 'InMemoryStorage', 'identify': { 'Should identify by idProperty if exists.'(this: any) { const storage = new InMemoryStorage({ diff --git a/tests/unit/storage/IndexedDBStorage.ts b/tests/unit/storage/IndexedDBStorage.ts index 01b0ce4..6f1c970 100644 --- a/tests/unit/storage/IndexedDBStorage.ts +++ b/tests/unit/storage/IndexedDBStorage.ts @@ -3,16 +3,15 @@ import * as assert from 'intern/chai!assert'; import * as sinon from 'sinon'; import IndexedDBStorage, { createRequestPromise } from '../../../src/storage/IndexedDBStorage'; import Promise from '@dojo/shim/Promise'; -import { StoreOperation, CrudOptions } from '../../../src/store/createStore'; +import { StoreOperation, CrudOptions } from '../../../src/interfaces'; import createFilter from '../../../src/query/createFilter'; import createSort from '../../../src/query/createSort'; import createRange from '../../../src/query/createStoreRange'; import CompoundQuery from '../../../src/query/CompoundQuery'; import { createData, createUpdates, ItemType, patches } from '../support/createData'; -import { Storage } from '../../../src/storage/InMemoryStorage'; -import {QueryType, Query} from '../../../src/query/interfaces'; +import { QueryType, Query, Storage } from '../../../src/interfaces'; import JsonPointer from '../../../src/patch/JsonPointer'; -import {BooleanOp} from '../../../src/query/createFilter'; +import { BooleanOp } from '../../../src/query/createFilter'; registerSuite((function(){ const isIndexedDbAvailable = typeof indexedDB !== 'undefined'; diff --git a/tests/unit/store/mixins/createObservableStoreMixin.ts b/tests/unit/store/ObservableStore.ts similarity index 94% rename from tests/unit/store/mixins/createObservableStoreMixin.ts rename to tests/unit/store/ObservableStore.ts index b4e4c1e..c11b005 100644 --- a/tests/unit/store/mixins/createObservableStoreMixin.ts +++ b/tests/unit/store/ObservableStore.ts @@ -1,20 +1,18 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; -import { - createObservableStore, ObservableStore, StoreDelta, ItemUpdate -} from '../../../../src/store/mixins/createObservableStoreMixin'; -import { ItemType, createData, createUpdates, patches, patchedItems } from '../../support/createData'; -import { CrudOptions, UpdateResults } from '../../../../src/store/createStore'; -import createAsyncStorage from '../../support/AsyncStorage'; -import InMemoryStorage from '../../../../src/storage/InMemoryStorage'; +import ObservableStore, { ObservableStoreInterface, StoreDelta, ItemUpdate } from '../../../src/store/ObservableStore'; +import { ItemType, createData, createUpdates, patches, patchedItems } from '../support/createData'; +import { CrudOptions, UpdateResults } from '../../../src/interfaces'; +import AsyncStorage from '../support/AsyncStorage'; +import InMemoryStorage from '../../../src/storage/InMemoryStorage'; import Set from '@dojo/shim/Set'; import Promise from '@dojo/shim/Promise'; 
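// Minimal usage sketch of the class-based API exercised by this suite. It assumes only the
// constructor options and observe() overloads that the tests below already use; the function
// name and log statements are illustrative and not part of the converted sources.
export function exampleObservableStoreUsage() {
	// The createObservableStore factory is replaced by constructing the class directly.
	const store = new ObservableStore({ data: createData() });
	// Whole-store observation: each update is a StoreDelta carrying adds, updates, deletes,
	// beforeAll and afterAll collections.
	store.observe().subscribe((delta) => {
		console.log(delta.adds.length, delta.updates.length, delta.deletes.length);
	});
	// Single-item observation: emits the latest state of 'item-1' and completes when it is deleted.
	store.observe('item-1').subscribe((item) => {
		console.log(item);
	});
}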
function getStoreAndDfd(test: any) { const dfd = test.async(1000); - const observableStore: ObservableStore> = createObservableStore( { data: createData() } ); - const emptyObservableStore = createObservableStore(); - const fetchingObservableStore: ObservableStore> = createObservableStore( { + const observableStore: ObservableStoreInterface> = new ObservableStore({ data: createData() } ); + const emptyObservableStore = new ObservableStore(); + const fetchingObservableStore: ObservableStoreInterface> = new ObservableStore( { data: createData(), fetchAroundUpdates: true }); @@ -23,14 +21,14 @@ function getStoreAndDfd(test: any) { } function getStoreWithAsyncStorage(test: any, asyncOptions?: {}, useAsync = true) { const dfd = useAsync ? test.async(1000) : null; - const asyncStorage = createAsyncStorage(asyncOptions); - const observableStore = createObservableStore({ storage: asyncStorage }); + const asyncStorage = new AsyncStorage(asyncOptions); + const observableStore = new ObservableStore({ storage: asyncStorage }); return { dfd, observableStore, asyncStorage }; } registerSuite({ - name: 'observableStoreMixin', + name: 'ObservableStore', 'with basic store': (function() { const ids = createData().map(function(item) { @@ -235,7 +233,7 @@ registerSuite({ add(this: any) { const { dfd, data } = getStoreAndDfd(this); - const fetchingObservableStore = createObservableStore({ + const fetchingObservableStore = new ObservableStore({ fetchAroundUpdates: true }); let ignoreFirst = true; @@ -529,7 +527,7 @@ registerSuite({ 'when operation fails in an ordered store, the error should be sent the observable way.'(this: any) { const dfd = this.async(1000); - const store = createObservableStore({ + const store = new ObservableStore({ data: createData() }); @@ -546,7 +544,7 @@ registerSuite({ const dfd = this.async(1000); const preLoadedStorage = new InMemoryStorage(); preLoadedStorage.add(createData()); - const store = createObservableStore({ + const store = new ObservableStore({ storage: preLoadedStorage }); @@ -570,7 +568,7 @@ registerSuite({ }, 'unsubscribing and resubscribing'(this: any) { - const store = createObservableStore({ + const store = new ObservableStore({ fetchAroundUpdates: true, data: createData() }); @@ -604,7 +602,7 @@ registerSuite({ }, 'unsubscribing in update'(this: any) { - const store = createObservableStore({ + const store = new ObservableStore({ fetchAroundUpdates: true, data: createData() }); @@ -669,7 +667,7 @@ registerSuite({ return rejected; } }; - const observableStore = createObservableStore({ + const observableStore = new ObservableStore({ storage: failingStorage }); @@ -740,7 +738,7 @@ registerSuite({ const data = createData(); let ignoreFirst = true; - observableStore.observe().subscribe((update: StoreDelta) => { + observableStore.observe().subscribe((update) => { if (ignoreFirst) { ignoreFirst = false; return; @@ -787,7 +785,7 @@ registerSuite({ }, 'should be able to observe with initial items'(this: any) { const { dfd, asyncStorage } = getStoreWithAsyncStorage(this, { put: 50, get: 10 }); - const observableStore = createObservableStore({ storage: asyncStorage, data: createData() }); + const observableStore = new ObservableStore({ storage: asyncStorage, data: createData() }); const data = createData(); observableStore.observe('item-1').subscribe(function(update: ItemType) { diff --git a/tests/unit/store/mixins/createQueryTransformMixin/querying.ts b/tests/unit/store/QueryableStore/querying.ts similarity index 90% rename from 
tests/unit/store/mixins/createQueryTransformMixin/querying.ts rename to tests/unit/store/QueryableStore/querying.ts index 34db1b5..196945b 100644 --- a/tests/unit/store/mixins/createQueryTransformMixin/querying.ts +++ b/tests/unit/store/QueryableStore/querying.ts @@ -1,36 +1,37 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; import * as sinon from 'sinon'; -import { createData, ItemType, patches, createUpdates } from '../../../support/createData'; -import createFilter from '../../../../../src/query/createFilter'; -import createRange from '../../../../../src/query/createStoreRange'; -import createSort from '../../../../../src/query/createSort'; -import createAsyncStorage from '../../../support/AsyncStorage'; -import { createQueryStore } from '../../../../../src/store/mixins/createQueryTransformMixin'; -import { diff } from '../../../../../src/patch/Patch'; +import { createData, ItemType, patches, createUpdates } from '../../support/createData'; +import createFilter from '../../../../src/query/createFilter'; +import createRange from '../../../../src/query/createStoreRange'; +import createSort from '../../../../src/query/createSort'; +import AsyncStorage from '../../support/AsyncStorage'; +import QueryStore from '../../../../src/store/QueryableStore'; +import { QueryResult } from '../../../../src/store/QueryResult'; +import { diff } from '../../../../src/patch/Patch'; import Promise from '@dojo/shim/Promise'; -import { createQueryTransformResult } from '../../../../../src/store/createQueryTransformResult'; +import { CrudOptions } from '../../../../src/interfaces'; function getStoreAndDfd(test: any, useAsync = true) { const dfd = useAsync ? test.async(1000) : null; - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }); - const emptyStore = createQueryStore(); + const emptyStore = new QueryStore(); return { dfd, queryStore, emptyStore }; } function getStoreWithAsyncStorage(test: any, asyncOptions?: {}, useAsync = true) { const dfd = useAsync ? 
test.async(1000) : null; - const asyncStorage = createAsyncStorage(asyncOptions); - const queryStore = createQueryStore({ storage: asyncStorage }); + const asyncStorage = new AsyncStorage(asyncOptions); + const queryStore = new QueryStore({ storage: asyncStorage }); return { dfd, queryStore, asyncStorage }; } registerSuite({ - name: 'Query-Transform Mixin - Querying', + name: 'Queryable Store - Querying', 'single query': function(this: any) { const { queryStore } = getStoreAndDfd(this, false); @@ -136,7 +137,7 @@ registerSuite({ 'should be notified of changes in parent collection on items in query or just moved from query'(this: any) { const dfd = this.async(2000); - const store = createQueryStore(); + const store = new QueryStore(); const data = createData(); const updates = createUpdates(); const calls: Array<() => any> = [ @@ -173,7 +174,7 @@ registerSuite({ 'shouldn\'t get notifications for updates outside of query'(this: any) { const dfd = this.async(2000); - const store = createQueryStore<{ id: string, value: number }>(); + const store = new QueryStore<{ id: string, value: number }>(); const filteredView = store.filter(createFilter().lessThan('value', 5)); let ignoreFirst = true; @@ -221,7 +222,7 @@ registerSuite({ 'notification for item deleted from initial data'(this: any) { const dfd = this.async(1000); - const store = createQueryStore({ + const store = new QueryStore({ data: createData() }); const data = createData(); @@ -273,7 +274,7 @@ registerSuite({ 'notification on item deleted from initial data after fetch'(this: any) { const dfd = this.async(1000); const data = createData(); - const store = createQueryStore({ + const store = new QueryStore({ data: data }); const filtered = store.filter(createFilter().greaterThan('value', 0)); @@ -370,7 +371,7 @@ registerSuite({ }, 'unsubscribing and resubscribing'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ fetchAroundUpdates: true, data: createData() }); @@ -406,7 +407,7 @@ registerSuite({ }, 'unsubscribing with another subscriber'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ fetchAroundUpdates: true, data: createData() }); @@ -444,7 +445,7 @@ registerSuite({ }, 'unsubscribing in update'(this: any) { - const store = createQueryStore({ + const store = new QueryStore({ fetchAroundUpdates: true, data: createData() }); @@ -490,14 +491,14 @@ registerSuite({ 'should throw if created without a source'(this: any) { assert.throw( - () => createQueryTransformResult(), 'Query Transform result cannot be created without providing a source store' + () => new QueryResult(), 'Query Transform result cannot be created without providing a source store' ); }, 'totalLength and dataLength': { 'totalLength should return the total number of items in storage': { 'fetch all'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }).filter(() => false); const fetchResult = queryStore.fetch(); @@ -510,7 +511,7 @@ registerSuite({ }, 'fetch with query'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }).filter(() => false); const fetchResult = queryStore.fetch(createFilter().custom(() => false)); @@ -525,7 +526,7 @@ registerSuite({ 'dataLength should return the number of items matching the Query Transform result\'s own queries': { 'fetch all'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() 
}).filter((item) => item.value < 3); const fetchResult = queryStore.fetch(); @@ -538,7 +539,7 @@ registerSuite({ }, 'fetch with query'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }).filter((item) => item.value < 3); const fetchResult = queryStore.fetch(createFilter().custom((item) => item.value < 2)); @@ -551,7 +552,7 @@ registerSuite({ }, 'should be rejected if fetch errors'(this: any) { - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ storage: { fetch() { const result = Promise.reject(Error('Fetch failed')); @@ -578,7 +579,7 @@ registerSuite({ 'should continue to report correct data after multiple updates'(this: any) { const dfd = this.async(); - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }); @@ -624,7 +625,7 @@ registerSuite({ 'async storage': { 'filtered subcollection fetch should not return items when it is done before add.'(this: any) { - const { queryStore: store } = getStoreWithAsyncStorage(this, { put: 20, fetch: 10 }, false); + const { queryStore: store } = getStoreWithAsyncStorage(this, { put: 20, fetch: 1 }, false); const subcollection = store.filter(createFilter().greaterThanOrEqualTo('value', 2)); store.add(createData()); @@ -633,8 +634,8 @@ registerSuite({ }); }, 'should complete initial add before subsequent operations'(this: any) { - const asyncStorage = createAsyncStorage(); - const store = createQueryStore({ + const asyncStorage = new AsyncStorage(); + const store = new QueryStore({ storage: asyncStorage, data: createData() }); @@ -646,20 +647,19 @@ registerSuite({ 'failed initial add should not prevent subsequent operations'(this: any) { let fail = true; const stub = sinon.stub(console, 'error'); - const asyncStorage = createAsyncStorage - .around('add', function(add: () => Promise) { - return function(this: any) { - if (fail) { - fail = false; - return Promise.reject(Error('error')); - } - else { - return add.apply(this, arguments); - } - }; - })(); + const asyncStorage = new (class extends AsyncStorage { + add(items: any[], options?: CrudOptions): any { + if (fail) { + fail = false; + return Promise.reject(Error('error')); + } + else { + return super.add(items, options); + } + } + })(); const data = createData(); - const store = createQueryStore({ + const store = new QueryStore({ storage: asyncStorage, data: data }); diff --git a/tests/unit/store/mixins/createQueryTransformMixin/tracking.ts b/tests/unit/store/QueryableStore/tracking.ts similarity index 90% rename from tests/unit/store/mixins/createQueryTransformMixin/tracking.ts rename to tests/unit/store/QueryableStore/tracking.ts index a18dd6b..1661c22 100644 --- a/tests/unit/store/mixins/createQueryTransformMixin/tracking.ts +++ b/tests/unit/store/QueryableStore/tracking.ts @@ -1,22 +1,17 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; -import { createData, ItemType, createUpdates } from '../../../support/createData'; -import { ObservableStore } from '../../../../../src/store/mixins/createObservableStoreMixin'; -import { CrudOptions } from '../../../../../src/store/createStore'; -import { UpdateResults } from '../../../../../src/store/createStore'; -import { QueryStore, createQueryStore } from '../../../../../src/store/mixins/createQueryTransformMixin'; -import { TrackableStoreDelta, MappedQueryTransformResult } from '../../../../../src/store/createQueryTransformResult'; +import { createData, ItemType, createUpdates } from 
'../../support/createData'; +import createFilter from '../../../../src/query/createFilter'; +import QueryStore from '../../../../src/store/QueryableStore'; +import { TrackableStoreDelta, MappedQueryResultInterface } from '../../../../src/store/QueryResult'; import Promise from '@dojo/shim/Promise'; -import createFilter from '../../../../../src/query/createFilter'; registerSuite(function() { - let trackableQueryStore: QueryStore>>; + let trackableQueryStore: QueryStore; function testFetchingQueryStore( - trackedCollection: QueryStore>>, - trackResult: MappedQueryTransformResult< - ItemType, QueryStore>> - >, + trackedCollection: QueryStore, + trackResult: MappedQueryResultInterface>, dfd: any, isFetchingAroundUpdates = false ) { @@ -91,10 +86,8 @@ registerSuite(function() { }); } function testFetchingQueryStoreWithDelayedOperations( - trackedCollection: QueryStore>>, - trackResult: MappedQueryTransformResult< - ItemType, QueryStore>> - >, + trackedCollection: QueryStore, + trackResult: MappedQueryResultInterface>, dfd: any ) { return new Promise(function(resolve) { @@ -189,9 +182,9 @@ registerSuite(function() { } return { - name: 'Query-Transform Mixin - Tracking', + name: 'Queryable Store - Tracking', beforeEach: function() { - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData() }); }, @@ -314,7 +307,7 @@ registerSuite(function() { 'tracking with a range query': { 'full range'(this: any) { const dfd = this.async(1000); - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData(), fetchAroundUpdates: true }); @@ -329,7 +322,7 @@ registerSuite(function() { }, 'released with range query should only filter "afterAll"'(this: any) { const dfd = this.async(1000); - const trackableQueryStore = createQueryStore(); + const trackableQueryStore = new QueryStore(); const untrackedCollection = trackableQueryStore.range(0, 1).track().release(); const data = createData(); @@ -349,7 +342,7 @@ registerSuite(function() { }, 'full range - delay between operations'(this: any) { const dfd = this.async(5000); - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore @@ -363,7 +356,7 @@ registerSuite(function() { }, 'full range, not initially fetching around updates'(this: any) { const dfd = this.async(1000); - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore @@ -377,7 +370,7 @@ registerSuite(function() { }, 'item pushed into collection'(this: any) { const dfd = this.async(1000); - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore @@ -488,7 +481,7 @@ registerSuite(function() { 'add data after initialization': function(this: any) { const dfd = this.async(1000); - trackableQueryStore = createQueryStore(); + trackableQueryStore = new QueryStore(); let ignoreFirst = true; trackableQueryStore.query(createFilter().custom(() => true)).track().observe().subscribe(update => { if (ignoreFirst) { @@ -507,7 +500,7 @@ registerSuite(function() { 'should receive a notification of initial data': function(this: any) { const dfd = this.async(1000); - trackableQueryStore = createQueryStore({ + trackableQueryStore = new QueryStore({ data: createData() }); trackableQueryStore @@ -581,7 +574,7 @@ registerSuite(function() { const dfd = this.async(1000); 
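		// Sketch of the tracking flow these tests exercise, using only calls already present in this
		// file (QueryStore, filter, track, observe, release):
		//   const tracked = new QueryStore({ data: createData() }).filter((item) => item.value > 1).track();
		//   tracked.observe().subscribe((delta) => { /* TrackableStoreDelta for the tracked collection */ });
		// A tracked result can report when items shift position within the collection, while calling
		// release() (as in the range-query test above) hands back an untracked view of the source store.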
const data = createData(); const updates = createUpdates()[0]; - const trackableQueryStore = createQueryStore(); + const trackableQueryStore = new QueryStore(); const trackedCollection = trackableQueryStore .filter(function(item: ItemType) { return item.value > 1; diff --git a/tests/unit/store/mixins/createQueryTransformMixin/transforming.ts b/tests/unit/store/QueryableStore/transforming.ts similarity index 98% rename from tests/unit/store/mixins/createQueryTransformMixin/transforming.ts rename to tests/unit/store/QueryableStore/transforming.ts index 0f3d977..fc44923 100644 --- a/tests/unit/store/mixins/createQueryTransformMixin/transforming.ts +++ b/tests/unit/store/QueryableStore/transforming.ts @@ -1,23 +1,23 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; -import { createData, patches } from '../../../support/createData'; -import { createQueryStore } from '../../../../../src/store/mixins/createQueryTransformMixin'; -import createFilter from '../../../../../src/query/createFilter'; +import { createData, patches } from '../../support/createData'; +import QueryStore from '../../../../src/store/QueryableStore'; +import createFilter from '../../../../src/query/createFilter'; import Set from '@dojo/shim/Set'; function getStoreAndDfd(test: any, useAsync = true) { const dfd = useAsync ? test.async(1000) : null; - const queryStore = createQueryStore({ + const queryStore = new QueryStore({ data: createData() }); - const emptyStore = createQueryStore(); + const emptyStore = new QueryStore(); return { dfd, queryStore, emptyStore }; } registerSuite({ - name: 'Query-Transform Mixin - Transform', + name: 'Queryable Store - Transform', 'single transformations'(this: any) { const { queryStore } = getStoreAndDfd(this, false); diff --git a/tests/unit/store/createStore.ts b/tests/unit/store/StoreBase.ts similarity index 91% rename from tests/unit/store/createStore.ts rename to tests/unit/store/StoreBase.ts index a210af9..f34f31c 100644 --- a/tests/unit/store/createStore.ts +++ b/tests/unit/store/StoreBase.ts @@ -1,10 +1,10 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; import * as sinon from 'sinon'; -import createStore, { StoreOperation } from '../../../src/store/createStore'; import Map from '@dojo/shim/Map'; -import Set from '@dojo/shim/Set'; import Promise from '@dojo/shim/Promise'; +import Set from '@dojo/shim/Set'; +import StoreBase from '../../../src/store/StoreBase'; import createRange from '../../../src/query/createStoreRange'; import createFilter from '../../../src/query/createFilter'; import JsonPointer from '../../../src/patch/JsonPointer'; @@ -14,20 +14,21 @@ import createOperation, { OperationType } from '../../../src/patch/createOperati import CompoundQuery from '../../../src/query/CompoundQuery'; import InMemoryStorage from '../../../src/storage/InMemoryStorage'; import { createData, ItemType, createUpdates, patches, patchedItems } from '../support/createData'; -import createAsyncStorage from '../support/AsyncStorage'; +import AsyncStorage from '../support/AsyncStorage'; +import { StoreOperation, CrudOptions } from '../../../src/interfaces'; function getStoreAndDfd(test: any, data = createData(), useAsync = true) { const dfd = useAsync ? 
test.async(1000) : null; - const store = createStore( { data: data } ); - const emptyStore = createStore(); + const store = new StoreBase( { data: data } ); + const emptyStore = new StoreBase(); return { dfd, store, emptyStore, data: createData() }; } function getStoreWithAsyncStorage(test: any, asyncOptions?: {}, useAsync = true) { const dfd = useAsync ? test.async(1000) : null; - const asyncStorage = createAsyncStorage(asyncOptions); - const store = createStore({ storage: asyncStorage }); + const asyncStorage = new AsyncStorage(asyncOptions); + const store = new StoreBase({ storage: asyncStorage }); return { dfd, store, asyncStorage }; } @@ -37,7 +38,7 @@ const ids = createData().map(function(item) { }); registerSuite({ - name: 'createStore', + name: 'StoreBase', 'initialize store'(this: any) { const { store, data } = getStoreAndDfd(this, createData(), false); @@ -170,7 +171,7 @@ registerSuite({ }); }, - 'should allow patching with an array of items'(this: any) { + 'should allow patching with an array of items - if default id property is used'(this: any) { const { dfd, store } = getStoreAndDfd(this); store.patch(createUpdates()[0]); store.fetch().then(function(data) { @@ -178,6 +179,27 @@ registerSuite({ }).then(dfd.resolve); }, + 'should use id property to identify patches'(this: any) { + type IdProp = { idProp: string; value: number } + const data: IdProp[] = [ + { idProp: 'item-1', value: 1 }, { idProp: 'item-2', value: 2 }, { idProp: 'item-3', value: 3 } + ]; + const store = new StoreBase({ + data: data, + idProperty: 'idProp' + }); + + return store.patch([ { id: 'item-1', value: 2} ]) + .then(() => store.fetch()) + .then((data) => { + assert.deepEqual(data, [ + { idProp: 'item-1', value: 2 }, + { idProp: 'item-2', value: 2 }, + { idProp: 'item-3', value: 3 } + ], 'Didn\'t patch record properly'); + }); + }, + 'should allow patching with an object and id in options'(this: any) { const { dfd, store } = getStoreAndDfd(this); const update = createUpdates()[0][0]; @@ -229,7 +251,7 @@ registerSuite({ const storage = new InMemoryStorage(); sinon.stub(storage, 'delete').returns(Promise.reject(Error('failed'))); - const store = createStore({ storage }); + const store = new StoreBase({ storage }); store.delete(ids[0]).then( dfd.rejectOnError(function () { @@ -329,11 +351,11 @@ registerSuite({ 'should allow a property or function to be specified as the id': function(this: any) { const data = createData(); const updates = createUpdates(); - const store = createStore({ + const store = new StoreBase({ data: updates[0], idProperty: 'value' }); - const idFunctionStore = createStore({ + const idFunctionStore = new StoreBase({ idFunction: (item: ItemType) => item.id + '-id', data: data }); @@ -380,7 +402,7 @@ registerSuite({ 'should generate unique ids': function(this: any) { const ids: Promise[] = []; - const store = createStore(); + const store = new StoreBase(); const generateNIds = 1000; // reduced to 1,000 since IE 11 took minutes to run 100,000 for (let i = 0; i < generateNIds; i++) { ids.push(store.createId()); @@ -401,7 +423,7 @@ registerSuite({ 'use catch': function(this: any) { const { dfd, data } = getStoreAndDfd(this); - const store = createStore({ + const store = new StoreBase({ data: [ data[0], data[1] ] }); const catchSpy = sinon.spy(); @@ -413,7 +435,7 @@ registerSuite({ 'add with conflicts should fail': function(this: any) { const { dfd, data } = getStoreAndDfd(this); - const store = createStore({ + const store = new StoreBase({ data: [ data[0], data[1] ] }); 
store.add(data).then(dfd.reject, dfd.resolve); @@ -428,7 +450,7 @@ registerSuite({ 'put with conflicts should override': function(this: any) { const { data } = getStoreAndDfd(this, undefined, false); - const store = createStore({ + const store = new StoreBase({ data: [ data[0], data[1] ] }); return store.put(data).then(function(result) { @@ -477,7 +499,7 @@ registerSuite({ 'async storage': { 'async operation should not be done immediately.'(this: any) { - const{ store } = getStoreWithAsyncStorage(this, { put: 50}, false); + const { store } = getStoreWithAsyncStorage(this, { put: 50}, false); const start = Date.now(); return store.add(createData()).then(function() { @@ -486,8 +508,8 @@ registerSuite({ }); }, 'should complete initial add before subsequent operations'(this: any) { - const asyncStorage = createAsyncStorage(); - const store = createStore({ + const asyncStorage = new AsyncStorage(); + const store = new StoreBase({ storage: asyncStorage, data: createData() }); @@ -499,20 +521,19 @@ registerSuite({ 'failed initial add should not prevent subsequent operations'(this: any) { let fail = true; const stub = sinon.stub(console, 'error'); - const asyncStorage = createAsyncStorage - .around('add', function(add: () => Promise) { - return function(this: any) { - if (fail) { - fail = false; - return Promise.reject(Error('error')); - } - else { - return add.apply(this, arguments); - } - }; - })(); + const asyncStorage = new (class extends AsyncStorage { + add(items: any[], options?: CrudOptions): any { + if (fail) { + fail = false; + return Promise.reject(Error('error')); + } + else { + return super.add(items, options); + } + } + })(); const data = createData(); - const store = createStore({ + const store = new StoreBase({ storage: asyncStorage, data: data }); diff --git a/tests/unit/store/materialize.ts b/tests/unit/store/materialize.ts index 5a2b43f..670a38d 100644 --- a/tests/unit/store/materialize.ts +++ b/tests/unit/store/materialize.ts @@ -2,11 +2,12 @@ import * as registerSuite from 'intern!object'; import * as assert from 'intern/chai!assert'; import * as sinon from 'sinon'; import { createData, ItemType, createUpdates } from '../support/createData'; -import { createQueryStore } from '../../../src/store/mixins/createQueryTransformMixin'; -import { createObservableStore, StoreDelta } from '../../../src/store/mixins/createObservableStoreMixin'; +import QueryStore from '../../../src/store/QueryableStore'; +import ObservableStore, { StoreDelta } from '../../../src/store/ObservableStore'; import materialize from '../../../src/store/materialize'; -import { CrudOptions, Store } from '../../../src/store/createStore'; +import { Store } from '../../../src/interfaces'; import { delay } from '@dojo/core/async/timing'; +import { QueryResultInterface } from '../../../src/store/QueryResult'; type TransformedObject = { _value: number; @@ -21,14 +22,14 @@ registerSuite({ 'Should apply updates to target store'(this: any) { const dfd = this.async(); - const targetStore = createObservableStore({ + const targetStore = new ObservableStore({ idProperty: '_id' }); - const trackableQueryStore = createQueryStore({ + const trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore - .transform((item) => ({ + .transform((item) => ({ _value: item.value, _nestedProperty: { _value: item.nestedProperty.value @@ -74,16 +75,18 @@ registerSuite({ } })); - materialize({ source: trackedCollection, target: targetStore }); + materialize< + TransformedObject, 
QueryResultInterface, Store + >({ source: trackedCollection, target: targetStore }); trackableQueryStore.delete('item-2'); }, 'Should stop applying updates after destroying handle'(this: any) { const dfd = this.async(); - const targetStore = createObservableStore({ + const targetStore = new ObservableStore({ idProperty: '_id' }); - const trackableQueryStore = createQueryStore({ + const trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore @@ -135,15 +138,17 @@ registerSuite({ } })); - const handle = materialize({ source: trackedCollection, target: targetStore }); + const handle = materialize< + TransformedObject, QueryResultInterface, Store + >({ source: trackedCollection, target: targetStore }); }, 'Should use apply function if provided'(this: any) { const dfd = this.async(); - const targetStore = createObservableStore({ + const targetStore = new ObservableStore({ idProperty: '_id' }); - const trackableQueryStore = createQueryStore({ + const trackableQueryStore = new QueryStore({ data: createData() }); const trackedCollection = trackableQueryStore @@ -170,7 +175,9 @@ registerSuite({ })); let initialUpdateFromSource = false; - materialize({ + materialize< + TransformedObject, QueryResultInterface, Store + >({ source: trackedCollection, target: targetStore, apply: dfd.rejectOnError((target: Store, { afterAll, deletes }: StoreDelta, source: any) => { @@ -205,8 +212,8 @@ registerSuite({ 'Shouldn\'t make any updates if initial update is empty'(this: any) { const dfd = this.async(1000); - const targetStore = createQueryStore(); - const trackableQueryStore = createQueryStore(); + const targetStore = new QueryStore(); + const trackableQueryStore = new QueryStore(); const trackedCollection = trackableQueryStore.filter(() => true).track(); targetStore.add = dfd.reject.bind(dfd, Error('Shouldn\'t have called add on targetStore')); @@ -220,8 +227,8 @@ registerSuite({ }, 'Should add new items to targetStore'(this: any) { - const targetStore = createQueryStore(); - const trackableQueryStore = createQueryStore(); + const targetStore = new QueryStore(); + const trackableQueryStore = new QueryStore(); const trackedCollection = trackableQueryStore.filter(() => true).track(); const spy = sinon.spy(targetStore, 'add'); @@ -240,8 +247,8 @@ registerSuite({ }, 'Should update items in target store'(this: any) { - const targetStore = createQueryStore(); - const trackableQueryStore = createQueryStore(); + const targetStore = new QueryStore(); + const trackableQueryStore = new QueryStore(); const trackedCollection = trackableQueryStore.filter(() => true).track(); const spy = sinon.spy(targetStore, 'put'); diff --git a/tests/unit/store/mixins/createTransactionMixin.ts b/tests/unit/store/mixins/createTransactionMixin.ts deleted file mode 100644 index 2c2505c..0000000 --- a/tests/unit/store/mixins/createTransactionMixin.ts +++ /dev/null @@ -1,144 +0,0 @@ -import * as registerSuite from 'intern!object'; -import * as assert from 'intern/chai!assert'; -import createStore, { StoreOperation, StoreOptions, CrudOptions, Store, UpdateResults} from '../../../../src/store/createStore'; -import createTransactionMixin, { TransactionStore } from '../../../../src/store/mixins/createTransactionMixin'; -import { createData, ItemType, createUpdates, patches, patchedItems } from '../../support/createData'; -import { ComposeFactory } from '@dojo/compose/compose'; -import createAsyncStorage from '../../support/AsyncStorage'; - -interface TransactionStoreFactory extends ComposeFactory, 
any> { - , C extends Store>(options?: StoreOptions): TransactionStore; -} - -const createTransactionStore: TransactionStoreFactory = createStore - .mixin(createTransactionMixin()); - -registerSuite(function(){ - - function getStoreAndDfd(test: any, useAsync = true) { - const dfd = useAsync ? test.async(1000) : null; - - const transactionStore = createTransactionStore(); - - return { dfd, transactionStore, data: createData() }; - } - - return { - name: 'createTransactionMixin', - - 'should allow chaining of operations'(this: any) { - const { dfd, transactionStore, data } = getStoreAndDfd(this); - const updates = createUpdates(); - - transactionStore.transaction() - .add(data) - .put(updates[0]) - .delete(data[0].id) - .commit() - .subscribe( - function next() { - }, - function error() { - }, - function completed() { - transactionStore.fetch().then(dfd.callback(function(data: ItemType[]) { - assert.deepEqual(data, updates[0].slice(1)); - })); - } - ); - }, - 'should receive all action results in order at once in an array.'(this: any) { - const { dfd, transactionStore, data } = getStoreAndDfd(this); - const updates = createUpdates(); - transactionStore.transaction() - .add(data) - .put(updates[0]) - .delete(data[0].id) - .commit() - .subscribe(dfd.callback(function(result: UpdateResults[]) { - assert.lengthOf(result, 3); - assert.strictEqual(result[0].type, StoreOperation.Add, '1st action should be of type "Add"'); - assert.deepEqual(result[0].successfulData, createData()); - assert.strictEqual(result[1].type, StoreOperation.Put, '2nd action should be of type "Put"'); - assert.deepEqual(result[1].successfulData, createUpdates()[0]); - assert.strictEqual(result[2].type, StoreOperation.Delete, '3rd action should be of type "Delete"'); - assert.deepEqual(result[2].successfulData, ['item-1']); - dfd.resolve(); - })); - }, - 'Patch which operates in place should not update previous operation result.'(this: any) { - const { dfd, transactionStore, data } = getStoreAndDfd(this); - transactionStore.transaction() - .add(data) - .patch(patches[0]) - .commit() - .subscribe(dfd.callback(function(result: UpdateResults[]) { - assert.lengthOf(result, 2); - assert.strictEqual(result[0].type, StoreOperation.Add, '1st action should be of type "Add"'); - assert.deepEqual(result[0].successfulData, createData()); - assert.strictEqual(result[1].type, StoreOperation.Patch, '2nd action should be of type "Patch"'); - assert.deepEqual(result[1].successfulData, [ - patchedItems[0] - ]); - dfd.resolve(); - })); - }, - 'should resolve as a thenable when all parts of a transaction have completed': function(this: any) { - const { transactionStore, data } = getStoreAndDfd(this, false); - const updates = createUpdates(); - - return transactionStore.transaction() - .add(data) - .put(updates[0]) - .delete(data[0].id) - .commit() - .then(function() { - return transactionStore.fetch().then(function(data) { - assert.deepEqual(data, [ updates[0][1], updates[0][2] ], - 'Transaction didn\'t properly resolve after all operations completed'); - }); - }); - }, - 'should be able to abort and start a new transaction.': function(this: any) { - const { transactionStore, data } = getStoreAndDfd(this, false); - const updates = createUpdates(); - - transactionStore.transaction() - .add(data) - .put(updates[0]) - .delete(data[0].id) - .abort() - - .transaction() - .add(data) - .put(updates[0]) - .commit() - .then(function() { - return transactionStore.fetch().then(function(data) { - assert.deepEqual(data, updates[0], - 'Transaction didn\'t 
properly resolve after all operations completed');
-					});
-				});
-		},
-		'should queue up operations in order, regardless of the behavior of the async storage.'(this: any) {
-			const transactionStore = createTransactionStore({
-				storage: createAsyncStorage({ delete: 10, put: 30 })
-			});
-			const data = createData();
-			const updates = createUpdates();
-
-			return transactionStore.transaction()
-				.add(data)
-				.put(updates[0])
-				.delete(data[0].id)
-				.commit()
-				.then(function() {
-					// TODO - remove any type on data
-					return transactionStore.fetch().then(function(data: any[]) {
-						assert.deepEqual(data, [ updates[0][1], updates[0][2] ],
-							'Transaction didn\'t properly resolve after all operations completed');
-					});
-				});
-		}
-	};
-}());
diff --git a/tests/unit/support/AsyncStorage.ts b/tests/unit/support/AsyncStorage.ts
index 33a0297..6c766ff 100644
--- a/tests/unit/support/AsyncStorage.ts
+++ b/tests/unit/support/AsyncStorage.ts
@@ -1,80 +1,65 @@
 import InMemoryStorage from '../../../src/storage/InMemoryStorage';
-import Map from '@dojo/shim/Map';
 import Promise from '@dojo/shim/Promise';
-import WeakMap from '@dojo/shim/WeakMap';
 import { delay } from '@dojo/core/async/timing';
-import compose from '@dojo/compose/compose';
-
-const instanceStateMap = new WeakMap<{}, any>();
+import { Query, FetchResult, CrudOptions } from '../../../src/interfaces';
+import Patch from '../../../src/patch/Patch';
 
 function getRandomInt(max = 100) {
 	return Math.floor(Math.random() * max);
 }
 
-function delayOperation(operation: Function, operationName: string) {
-	return function(this: any, ...args: any[]) {
-		const state = instanceStateMap.get(this);
-		const milliseconds = state[operationName] || getRandomInt();
-		return delay(milliseconds)(operation.bind(this, ...args));
-	};
-}
+export default class AsyncStorage<T> extends InMemoryStorage<T> {
+	timing: { [ index: string ]: number | undefined };
+
+	constructor(options?: any) {
+		super(options);
+		this.timing = options || {};
+	}
+
+	get(ids: string[]): Promise<T[]> {
+		return delay(this.timing['get'] || getRandomInt())(() => super.get(ids));
+	}
+
+	createId() {
+		return delay(this.timing['createId'] || getRandomInt())(() => super.createId());
+	}
 
-const createAsyncStorage = compose(InMemoryStorage).mixin({
-	initialize(instance: any, options: any = {}) {
-		instance.data = [];
-		instance.index = new Map();
-		instance.idProperty = options.idProperty;
-		instance.idFunction = options.idFunction;
-		instance.returnsPromise = Promise.resolve();
+	put(items: T[], options?: CrudOptions) {
+		return delay(this.timing['put'] || getRandomInt())(() => super.put(items, options));
 	}
-}).mixin({
-	initialize(instance, asyncOptions = {}) {
-		instanceStateMap.set(instance, asyncOptions);
-	},
-	aspectAdvice: {
-		around: {
-			createId(createId: Function) {
-				return delayOperation(createId, 'createId');
-			},
-			fetch(fetch: Function) {
-				const delayed = delayOperation(fetch, 'fetch');
-				return function(this: any, ...args: any[]) {
-					let resolveTotalLength: (totalLength: number) => void;
-					let rejectTotalLength: (error: any) => void;
-					const totalLength = new Promise((resolve, reject) => {
-						resolveTotalLength = resolve;
-						rejectTotalLength = reject;
-					});
-					const returnPromise = delayed.bind(this, ...args)();
-					returnPromise.totalLength = returnPromise.dataLength = totalLength;
-					delayed.bind(this)().then(
-						(fullResults: any) => {
-							resolveTotalLength(fullResults.length);
-						},
-						(error: any) => {
-							rejectTotalLength(error);
-						}
-					);
-					return returnPromise;
-				};
-			},
-			get(get: Function) {
-				return delayOperation(get, 'get');
-			},
-			add(add: Function) {
-				return delayOperation(add, 'put');
-			},
-			put(put: Function) {
-				return delayOperation(put, 'put');
-			},
-			delete(_delete: Function) {
-				return delayOperation(_delete, 'delete');
-			},
-			patch(patch: Function) {
-				return delayOperation(patch, 'patch');
-			}
-		}
+	add(items: T[], options?: CrudOptions) {
+		return delay(this.timing['add'] || getRandomInt())(() => super.add(items, options));
 	}
-});
-export default createAsyncStorage;
+
+	delete(ids: string[]) {
+		return delay(this.timing['delete'] || getRandomInt())(() => super.delete(ids));
+	}
+
+	patch(updates: { id: string; patch: Patch<T, T> }[]) {
+		return delay(this.timing['patch'] || getRandomInt())(() => super.patch(updates));
+	}
+
+	fetch(query?: Query<T>) {
+		let totalLengthResolve: () => void;
+		let totalLengthReject: () => void;
+		let fetchResultResolve: () => void;
+		let fetchResultReject: () => void;
+		const totalLengthPromise = new Promise<number>((resolve, reject) => {
+			totalLengthResolve = resolve;
+			totalLengthReject = reject;
+		});
+		const fetchResult: FetchResult<T> = <FetchResult<T>> new Promise<T[]>((resolve, reject) => {
+			fetchResultResolve = resolve;
+			fetchResultReject = reject;
+		});
+		fetchResult.totalLength = fetchResult.dataLength = totalLengthPromise;
+		setTimeout(() => {
+			const result = super.fetch();
+			result.then(fetchResultResolve, fetchResultReject);
+			result.totalLength.then(totalLengthResolve, totalLengthReject);
+		}, this.timing['fetch'] || getRandomInt());
+
+		return fetchResult;
+	}
+}
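
For reference, the subclass-override pattern that replaces the deleted delayOperation/.around() aspect advice above is the same one the 'failed initial add' test uses with an anonymous class. The sketch below is illustrative only and not part of this patch: the FailFirstAddStorage name and the surrounding wiring are invented here, while AsyncStorage, StoreBase, CrudOptions, createData, ItemType and the import paths are the ones already used in tests/unit/store/StoreBase.ts, and the add signature mirrors that test's anonymous class.

import Promise from '@dojo/shim/Promise';
import StoreBase from '../../../src/store/StoreBase';
import AsyncStorage from '../support/AsyncStorage';
import { createData, ItemType } from '../support/createData';
import { CrudOptions } from '../../../src/interfaces';

// Override a single storage operation; every other call keeps the
// randomized delays inherited from AsyncStorage.
class FailFirstAddStorage extends AsyncStorage<ItemType> {
	private failed = false;

	add(items: any[], options?: CrudOptions): any {
		if (!this.failed) {
			this.failed = true;
			return Promise.reject(Error('error'));
		}
		return super.add(items, options);
	}
}

// Drop-in replacement for the stubbed storage used in the async storage tests.
const store = new StoreBase({
	storage: new FailFirstAddStorage(),
	data: createData()
});

Compared with the old aspect-advice factory, the overridden method is type checked against the base class, which is the point of finishing the TS 2.2 class conversion in this test helper.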