Skip to content

Commit

Permalink
Add limit option to fetch requests; resolves farmOS#42.
Browse files Browse the repository at this point in the history
  • Loading branch information
jgaehring committed Nov 27, 2021
1 parent 195b3f0 commit d14283e
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 17 deletions.
51 changes: 36 additions & 15 deletions src/client/adapter/index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
import append from 'ramda/src/append.js';
import chain from 'ramda/src/chain.js';
import compose from 'ramda/src/compose.js';
import concat from 'ramda/src/concat.js';
import evolve from 'ramda/src/evolve.js';
import mapObjIndexed from 'ramda/src/mapObjIndexed.js';
import map from 'ramda/src/map.js';
import path from 'ramda/src/path.js';
import reduce from 'ramda/src/reduce.js';
import client from '../index.js';
import entities, { entityMethods } from '../../entities.js';
Expand All @@ -9,6 +14,8 @@ import {
transformFetchResponse, transformSendResponse,
} from './transformations.js';

// Default cap on resources per page, used as the fallback `maxPageLimit`
// adapter option. NOTE(review): presumably mirrors the Drupal JSON:API
// server's 50-resource page limit — confirm against the host configuration.
const DRUPAL_PAGE_LIMIT = 50;

function parseBundles(filter, validTypes) {
const bundles = [];
// The filter must either be an object (logical $and) or an array (logical $or).
Expand Down Expand Up @@ -47,31 +54,42 @@ function parseBundles(filter, validTypes) {
}

// Collapse an array of Promise.allSettled() results — one per bundle, where
// each fulfilled `value` is the array of page responses gathered for that
// bundle — into a single summary object:
//   data:      every resource entity from every page of every bundle
//   fulfilled: every successful page response
//   rejected:  the rejection reason of every failed bundle request
// Entities and responses are appended in request order. (The prior
// evolve/curried-concat form prepended each new bundle's results,
// reversing the order relative to the original implementation.)
const aggregateBundles = results => results.reduce((aggregate, result) => {
  const { data, fulfilled, rejected } = aggregate;
  const { reason, value, status } = result;
  if (status === 'rejected') {
    return { data, fulfilled, rejected: rejected.concat([reason]) };
  }
  // Unwrap the entities from each page response's JSON:API envelope
  // (res.data.data) and flatten them into a single array.
  const nextData = value.flatMap(res => res.data.data);
  return {
    data: data.concat(nextData),
    fulfilled: fulfilled.concat(value),
    rejected,
  };
}, { data: [], fulfilled: [], rejected: [] });

export default function adapter(model, opts) {
const { host, ...rest } = opts;
const { host, maxPageLimit = DRUPAL_PAGE_LIMIT, ...rest } = opts;
const connection = client(host, rest);
const initSchemata = model.schema.get();
let filterTransforms = generateFilterTransforms(initSchemata);
model.schema.on('set', (schemata) => {
filterTransforms = generateFilterTransforms(schemata);
});

// For chaining consecutive requests for the next page of resources until the
// provided limit is reached, or there are no further resources to fetch.
// Resolves to the array of all page responses gathered so far (`prev` plus
// this page). NOTE(review): when `limit` is undefined, or a response lacks
// data.data (making newTotal NaN), both comparisons below are false, so
// paging continues until the server omits a `next` link — confirm that
// fetch-everything is the intended default behavior.
const chainRequests = (req, limit, prev = [], total = 0) => req.then((res) => {
  // JSON:API pagination: href of the next page, if the server provides one.
  const next = path(['data', 'links', 'next', 'href'], res);
  // Number of resources this page actually delivered.
  const resLength = path(['data', 'data', 'length'], res);
  const newTotal = total + resLength;
  const all = prev.concat(res);
  if (!next || newTotal >= limit) return all;
  const remainder = limit - newTotal;
  // Shrink the final page so the total never overshoots the requested limit;
  // otherwise leave the server's own page size in effect.
  const url = remainder < maxPageLimit ? `${next}&page[limit]=${remainder}` : next;
  const nextReq = connection.request(url);
  return chainRequests(nextReq, limit, all, newTotal);
});

return {
...connection,
schema: {
Expand All @@ -96,11 +114,14 @@ export default function adapter(model, opts) {
},
...entityMethods(({ nomenclature: { name, shortName } }) => ({
...connection[shortName],
fetch: ({ filter }) => {
fetch: ({ filter, limit }) => {
const validTypes = Object.keys(model.schema.get(name));
const bundles = parseBundles(filter, validTypes);
const bundleRequests = bundles.map(({ name: bundle, filter: bundleFilter }) =>
connection[shortName].fetch(bundle, { filter: bundleFilter, filterTransforms }));
const bundleRequests = bundles.map(({ name: bundle, filter: bundleFilter }) => {
const fetchOptions = { filter: bundleFilter, filterTransforms, limit };
const req = connection[shortName].fetch(bundle, fetchOptions);
return chainRequests(req, limit);
});
const handleBundleResponse = compose(
transformFetchResponse(name),
aggregateBundles,
Expand Down
9 changes: 7 additions & 2 deletions src/client/request.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,17 @@ import has from 'ramda/src/has.js';
import ifElse from 'ramda/src/ifElse.js';
import parseFilter from './parseFilter.js';

// Translate a fetch `limit` option into a JSON:API page-size query fragment.
// Anything other than a positive integer (including Infinity, floats, and
// numeric strings) yields no fragment, leaving the server's default in effect.
const parseLimit = (limit) => {
  const isPositiveInteger = Number.isInteger(limit) && limit > 0;
  return isPositiveInteger ? `&page[limit]=${limit}` : '';
};
// Build the query string for a fetch request: the filter params produced by
// parseFilter, followed by an optional page-size fragment from parseLimit.
const parseFetchParams = ({ filter = {}, filterTransforms, limit }) => {
  const filterParams = parseFilter(filter, { filterTransforms });
  return `${filterParams}${parseLimit(limit)}`;
};

export default function farmRequest(client) {
// Generic request helper: all options besides `method` are serialized as the
// JSON request body. Defaults to GET. NOTE(review): even GET requests pass a
// stringified (possibly empty) data payload — confirm the client ignores it.
const request = (endpoint, { method = 'GET', ...data } = {}) =>
  client(endpoint, { method, data: JSON.stringify(data) });

const fetchEntity = entity => (bundle, { filter = {}, filterTransforms } = {}) =>
request(`/api/${entity}/${bundle}?${parseFilter(filter, { filterTransforms })}`);
// GET /api/{entity}/{bundle}, with the filter, filterTransforms, and limit
// options encoded into the query string by parseFetchParams.
const fetchEntity = entity => (bundle, options = {}) =>
  request(`/api/${entity}/${bundle}?${parseFetchParams(options)}`);

// POST a new entity of the given bundle; `data` becomes the request body.
const postEntity = entity => (bundle, data) =>
  request(`/api/${entity}/${bundle}`, { method: 'POST', data });
Expand Down
21 changes: 21 additions & 0 deletions test/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,4 +45,25 @@ describe('farmOS', function () {
})
.catch(reportError);
});
// limit=5 is under the server's page cap, so it should be satisfied by a
// single request.
it('accepts a limit option of 5 on fetch requests', () => farm.asset.fetch({
  filter: { type: 'animal' },
  limit: 5,
}).then(({ data, fulfilled }) => {
  expect(data).to.have.lengthOf(5);
  expect(fulfilled).to.have.lengthOf(1);
}));
// limit=150 exceeds the 50-resource page cap, so exactly 3 chained page
// requests are expected.
it('accepts a limit option of 150 on fetch requests', () => farm.asset.fetch({
  filter: { type: 'animal' },
  limit: 150,
}).then(({ data, fulfilled }) => {
  expect(data).to.have.lengthOf(150);
  expect(fulfilled).to.have.lengthOf(3);
}));
// limit=Infinity pages through everything. NOTE(review): assumes the test
// server holds 200-250 animal assets spread across 5 pages — confirm the
// fixture data before relying on these bounds.
it('accepts a limit option of Infinity on fetch requests', () => farm.asset.fetch({
  filter: { type: 'animal' },
  limit: Infinity,
}).then(({ data, fulfilled }) => {
  expect(data).to.have.lengthOf.within(200, 250);
  expect(fulfilled).to.have.lengthOf(5);
}));
});

0 comments on commit d14283e

Please sign in to comment.