
Commit

Use sync.js in Vuex actions; resolves #479.
jgaehring committed Jan 7, 2022
1 parent 62e0872 commit 35aae84
Showing 6 changed files with 85 additions and 121 deletions.
53 changes: 30 additions & 23 deletions src/core/http/sync.js
@@ -1,42 +1,49 @@
-import { allPass, insert } from 'ramda';
+import {
+  allPass, insert, reduce,
+} from 'ramda';
 import farm from '../farm';
 import parseFilter from '../utils/parseFilter';
 
-const syncEntity = (shortName, { cache = [], filter, limit = Infinity }) =>
-  farm[shortName].fetch({ filter, limit }).then((fetchResults) => {
-    const mergedResults = fetchResults.data.reduce((collection, remote) => {
+export const fetchEntities = (shortName, { cache = [], filter, limit }) =>
+  farm[shortName].fetch({ filter, limit }).then((results) => {
+    const { data, fulfilled, rejected } = results;
+    const entities = data.reduce((collection, remote) => {
       const i = collection.findIndex(ent => ent.id === remote.id);
       const merged = farm[shortName].merge(collection[i], remote);
       return insert(i, merged, collection);
     }, cache);
-    const failedBundles = fetchResults.rejected.map(({ response = {} }) => {
+    return { data: entities, fulfilled, rejected };
+  });
+
+export const syncEntities = (shortName, { cache = [], filter, limit }) =>
+  fetchEntities(shortName, { cache, filter, limit }).then((fetchResults) => {
+    const { data: mergedEntities } = fetchResults;
+    const failedBundleNames = fetchResults.rejected.map(({ response = {} }) => {
       const { config: { url } } = response;
       const bundleName = url.split('?')[0].split('/').pop();
       return bundleName;
     });
     const predicate = allPass([
       parseFilter(filter),
-      entity => failedBundles.every(b => b !== entity.type),
+      entity => failedBundleNames.every(b => b !== entity.type),
       farm.meta.isUnsynced,
     ]);
-    const syncables = mergedResults.filter(predicate);
+    const syncables = mergedEntities.filter(predicate);
     const sendRequests = syncables.map(farm[shortName].send);
-    return Promise.allSettled(sendRequests)
-      .then(sendResults => sendResults.reduce((result, { status, reason, value: remote }) => {
-        const { data, fulfilled, rejected } = result;
-        if (status === 'rejected') {
-          return {
-            ...result,
-            rejected: [...rejected, reason],
-          };
-        }
-        const i = syncables.findIndex(ent => ent.id === remote.id);
-        const merged = farm[shortName].merge(syncables[i], remote);
+    const handleSendResults = reduce((result, { status, reason, value: remote }) => {
+      const { data, fulfilled, rejected } = result;
+      if (status === 'rejected') {
         return {
-          data: insert(i, merged, data),
-          fulfilled: [...fulfilled, remote],
+          ...result,
+          rejected: [...rejected, reason],
         };
-      }, fetchResults));
+      }
+      const i = syncables.findIndex(ent => ent.id === remote.id);
+      const merged = farm[shortName].merge(syncables[i], remote);
+      return {
+        data: insert(i, merged, data),
+        fulfilled: [...fulfilled, remote],
+      };
+    }, fetchResults);
+    return Promise.allSettled(sendRequests).then(handleSendResults);
   });
-
-export default syncEntity;
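
For orientation, here is a minimal usage sketch of the two new exports (editorial, not part of the commit). The import path, the 'log' short name, and the filter are illustrative assumptions; the { data, fulfilled, rejected } result shape follows from the code above.

import { fetchEntities, syncEntities } from './core/http/sync';

// fetchEntities only fetches remote entities and merges them into the provided
// cache, reporting which bundle requests fulfilled or rejected.
fetchEntities('log', { cache: [], filter: { type: 'activity' }, limit: Infinity })
  .then(({ data, fulfilled, rejected }) => {
    console.log(data.length, fulfilled.length, rejected.length);
  });

// syncEntities performs the same fetch-and-merge, then sends back any entities
// that remain unsynced locally, skipping bundles whose fetch request failed.
syncEntities('log', { cache: [], filter: { type: 'activity' }, limit: Infinity })
  .then(({ data }) => console.log(`${data.length} entities after sync`));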
34 changes: 13 additions & 21 deletions src/core/idb/cache.js
@@ -1,14 +1,11 @@
-import {
-  anyPass, complement, compose,
-} from 'ramda';
+import { anyPass, complement } from 'ramda';
 import farm from '../farm';
 import nomenclature from '../store/nomenclature';
 import {
   deleteRecord, getRecords, saveRecord,
 } from '.';
-import syncEntity from '../http/sync';
+import { syncEntities } from '../http/sync';
 import parseFilter from '../utils/parseFilter';
-import flattenEntity from '../utils/flattenEntity';
 import daysAway from '../utils/daysAway';
 import SyncError from '../http/SyncError';
 
@@ -41,13 +38,10 @@ export const cachingCriteria = (options = {}) => {
 
 export const cacheEntity = (name, entity, options) => {
   const criteria = cachingCriteria(options)[name];
-  const meetsCriteria = compose(
-    anyPass([
-      parseFilter(criteria),
-      farm.meta.isUnsynced,
-    ]),
-    flattenEntity,
-  );
+  const meetsCriteria = anyPass([
+    parseFilter(criteria),
+    farm.meta.isUnsynced,
+  ]);
   if (meetsCriteria(entity)) {
     return saveRecord('entities', name, entity);
   }
@@ -56,13 +50,10 @@ export const cacheEntity = (name, entity, options) => {
 
 export const purgeCache = () => {
   const criteria = cachingCriteria();
-  const meetsCriteria = name => compose(
-    complement(anyPass([
-      parseFilter(criteria[name]),
-      farm.meta.isUnsynced,
-    ])),
-    flattenEntity,
-  );
+  const meetsCriteria = name => complement(anyPass([
+    parseFilter(criteria[name]),
+    farm.meta.isUnsynced,
+  ]));
   const dbRequests = Object.keys(nomenclature.entities)
     .map(name => deleteRecord('entities', name, meetsCriteria(name)));
   return Promise.all(dbRequests);
@@ -74,9 +65,10 @@ export const syncCache = async () => {
   const { lastSync } = settings;
   const requests = Object.values(nomenclature.entities).map(async ({ name, shortName }) => {
     const criteria = cachingCriteria({ now })[name];
-    const filter = lastSync ? { ...criteria, changed: lastSync } : criteria;
+    const changed = { $gt: lastSync };
+    const filter = changed.$gt ? { ...criteria, changed } : criteria;
     const cache = await getRecords('entities', name);
-    const syncResults = await syncEntity(shortName, { filter, cache, limit: Infinity });
+    const syncResults = await syncEntities(shortName, { filter, cache, limit: Infinity });
     const cacheRequests = syncResults.data.map(e => cacheEntity(name, e, criteria));
     const cacheResults = await Promise.allSettled(cacheRequests);
     const failedToCache = cacheResults.some(({ status }) => status === 'rejected');
16 changes: 5 additions & 11 deletions src/core/idb/cache.test.js
@@ -1,10 +1,7 @@
-import {
-  anyPass, complement, compose,
-} from 'ramda';
+import { anyPass, complement } from 'ramda';
 import farm from '../farm';
 import { cachingCriteria } from './cache';
 import parseFilter from '../utils/parseFilter';
-import flattenEntity from '../utils/flattenEntity';
 import daysAway from '../utils/daysAway';
 
 const now = new Date().toISOString();
@@ -22,13 +19,10 @@ const unsyncedMetadata = {
 };
 
 const criteria = cachingCriteria({ now }).log;
-const meetsCachingCriteria = compose(
-  anyPass([
-    parseFilter(criteria),
-    farm.meta.isUnsynced,
-  ]),
-  flattenEntity,
-);
+const meetsCachingCriteria = anyPass([
+  parseFilter(criteria),
+  farm.meta.isUnsynced,
+]);
 const meetsEvictionCriteria = complement(meetsCachingCriteria);
 
 describe('cachingCriteria', () => {
86 changes: 25 additions & 61 deletions src/core/store/entities.js
@@ -1,15 +1,13 @@
 import Vue from 'vue';
-import {
-  allPass, anyPass, compose, reduce,
-} from 'ramda';
+import { anyPass } from 'ramda';
 import farm from '../farm';
 import nomenclature from './nomenclature';
 import { deleteRecord, getRecords } from '../idb';
 import { cacheEntity } from '../idb/cache';
 import SyncError from '../http/SyncError';
 import upsert from '../utils/upsert';
 import parseFilter from '../utils/parseFilter';
-import flattenEntity from '../utils/flattenEntity';
+import { fetchEntities, syncEntities } from '../http/sync';
 
 function parseFilterWithOptions(filter, options = {}) {
   const predicates = [parseFilter(filter)];
@@ -38,8 +36,7 @@ export default {
       upsert(state[shortPlural], 'id', entity);
     },
     filterEntities(state, { shortPlural, predicate }) {
-      state[shortPlural] = state[shortPlural]
-        .filter(compose(predicate, flattenEntity));
+      state[shortPlural] = state[shortPlural].filter(predicate);
     },
     updateEntity(state, payload) {
       const { shortPlural, index, entity } = payload;
@@ -99,8 +96,7 @@ export default {
       const { shortPlural } = nomenclature.entities[name];
       const predicate = parseFilterWithOptions(filter, options);
       commit('filterEntities', { shortPlural, predicate });
-      const query = compose(predicate, flattenEntity);
-      return getRecords('entities', name, query).then((results) => {
+      return getRecords('entities', name, predicate).then((results) => {
         const data = results.map((entity) => {
           commit('upsertEntity', { shortPlural, entity });
           return entity;
@@ -110,71 +106,39 @@
         throw new Error({ data: [], fulfilled: [], rejected: [e] });
       });
     },
-    fetchEntities({ commit, dispatch, state }, { name, filter, options }) {
+    fetchEntities({ commit, dispatch, state }, payload) {
+      const {
+        name, filter, limit = Infinity, options,
+      } = payload;
       const { shortName, shortPlural } = nomenclature.entities[name];
+      const now = new Date().toISOString();
+      const criteria = { now, uid: state.profile.user.id };
       return dispatch('loadEntities', { name, filter, options })
-        .then(() => farm[shortName].fetch({ filter }))
+        .then(({ data }) => fetchEntities(shortName, { cache: data, filter, limit }))
        .then((results) => {
-          const now = new Date().toISOString();
-          const criteria = { now, uid: state.profile.user.id };
-          results.data.forEach((remote) => {
-            const local = state[shortPlural].find(ent => ent.id === remote.id);
-            const entity = farm[shortName].merge(local, remote);
+          results.data.forEach((entity) => {
             commit('upsertEntity', { shortPlural, entity });
             cacheEntity(name, entity, criteria);
           });
           return errorInterceptor(results);
         });
     },
-    syncEntities({ commit, dispatch, state }, { name, filter, options }) {
+    syncEntities({ commit, dispatch, state }, payload) {
+      const {
+        name, filter, limit = Infinity, options,
+      } = payload;
       const { shortName, shortPlural } = nomenclature.entities[name];
-      const now = Date.now();
+      const now = new Date().toISOString();
       const criteria = { now, uid: state.profile.user.id };
-      const handleSendResults = reduce((results, { status, reason, value: remote }) => {
-        const { fulfilled, rejected } = results;
-        if (status === 'rejected') {
-          return {
-            ...results,
-            rejected: [...rejected, reason],
-          };
-        }
-        const local = state[shortPlural].find(ent => ent.id === remote.id);
-        const entity = farm[shortName].merge(local, remote);
-        commit('upsertEntity', { shortPlural, entity });
-        cacheEntity(name, entity, criteria);
-        upsert(results.data, 'id', remote);
-        return {
-          ...results,
-          fulfilled: [...fulfilled, remote],
-        };
-      });
-      return dispatch('fetchEntities', { name, filter, options })
-        .catch((e) => {
-          if (e.fulfilled?.length > 0 && !e.loginRequired) {
-            const { data, fulfilled, rejected } = e;
-            return { data, fulfilled, rejected };
-          }
-          throw e;
-        })
-        .then((fetchResults) => {
-          const failedBundles = fetchResults.rejected.map(({ response = {} }) => {
-            const { config: { url } } = response;
-            const bundle = url.split('?')[0].split('/').pop();
-            return bundle;
+      return dispatch('loadEntities', { name, filter, options })
+        .then(({ data }) => syncEntities(shortName, { cache: data, filter, limit }))
+        .then((results) => {
+          results.data.forEach((entity) => {
+            commit('upsertEntity', { shortPlural, entity });
+            cacheEntity(name, entity, criteria);
           });
-          const excludeFailedBundles = entity =>
-            failedBundles.every(b => b !== entity.type);
-          const predicate = allPass([
-            parseFilterWithOptions(filter, options),
-            excludeFailedBundles,
-            farm.meta.isUnsynced,
-          ]);
-          const entities = state[shortPlural]
-            .filter(compose(predicate, flattenEntity));
-          const requests = entities.map(farm[shortName].send);
-          return Promise.allSettled(requests)
-            .then(handleSendResults(fetchResults));
-        }).then(errorInterceptor);
+          return errorInterceptor(results);
+        });
     },
   },
 };
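
A hedged sketch of how the reworked actions might be dispatched from a component (editorial, not part of the commit), assuming the entities module is registered without a namespace; the 'log' name and filter are illustrative.

// Both actions now take a single payload with name, filter, limit, and options.
this.$store.dispatch('fetchEntities', { name: 'log', filter: { type: 'activity' }, limit: 50 })
  .then((results) => {
    // Results pass through errorInterceptor, so partial failures may surface as a SyncError.
  });

// syncEntities accepts the same payload shape, but also sends unsynced local changes.
this.$store.dispatch('syncEntities', { name: 'log', filter: { type: 'activity' } });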
4 changes: 2 additions & 2 deletions src/core/utils/flattenEntity.js
@@ -1,6 +1,6 @@
 const flattenEntity = ({
-  id, type, meta, attributes, relationships,
-}) => ({
+  id, type, meta, attributes = {}, relationships = {},
+} = {}) => ({
   id, type, meta, ...attributes, ...relationships,
 });
 
13 changes: 10 additions & 3 deletions src/core/utils/parseFilter.js
@@ -2,6 +2,11 @@
 import {
   allPass, any, anyPass, compose, equals, init, last, map, none, prop, T, when,
 } from 'ramda';
+import flattenEntity from './flattenEntity';
 
+// NB: These are not curried, so can only be used as single arity functions.
+const safeAny = predicate => (data = []) => any(predicate)(data);
+const safeNone = predicate => (data = []) => none(predicate)(data);
+
 const operators = {
   $and: compose(allPass, map(parseFilter)),
@@ -12,8 +17,8 @@ const operators = {
   $gte: bound => data => data >= bound,
   $lt: bound => data => data < bound,
   $lte: bound => data => data <= bound,
-  $in: compose(any, parseFilter),
-  $nin: compose(none, parseFilter),
+  $in: compose(safeAny, parseFilter),
+  $nin: compose(safeNone, parseFilter),
 };
 
 const isNumber = n => !Number.isNaN(+n);
@@ -48,7 +53,7 @@ function parseField([key, value]) {
   return compose(predicate, prop(key));
 }
 
-export default function parseFilter(filter = {}) {
+function parseFilter(filter = {}) {
   if (Array.isArray(filter)) {
     return operators.$or(filter);
   }
@@ -57,3 +62,5 @@ export default function parseFilter(filter = {}) {
   if (entries.length === 0) return T;
   return allPass(entries.map(parseField));
 }
+
+export default filter => compose(parseFilter(filter), flattenEntity);
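
A small editorial sketch of the new default export (not part of the commit): parseFilter now flattens each entity before matching, so a filter can address attributes like changed directly. The entity fields below are illustrative.

import parseFilter from './parseFilter';

const lastSync = '2022-01-01T00:00:00.000Z';
// Matches any entity whose changed timestamp is later than the last sync.
const changedSinceSync = parseFilter({ changed: { $gt: lastSync } });

changedSinceSync({
  id: 'abc123',
  type: 'activity',
  attributes: { name: 'Weeding', changed: '2022-01-05T08:30:00.000Z' },
  relationships: {},
}); // => true, because flattenEntity lifts changed to the top level before the $gt comparison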
