feat(gatsby-source-contentful): Increase Contentful sync by up to 10x (#30422) (#30643)

Co-authored-by: Ward Peeters <ward@coding-tech.com>
(cherry picked from commit b9791fe)

Co-authored-by: Benedikt Rötsch <axe312ger@users.noreply.github.com>
GatsbyJS Bot and axe312ger authored Apr 2, 2021
1 parent b3315a0 commit fc61c88
Showing 5 changed files with 189 additions and 16 deletions.
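
For context: the commit raises the default sync page size and, when Contentful rejects a response as too big, recommends pinning pageLimit in gatsby-config.js. A minimal configuration sketch (spaceId/accessToken values are placeholders; pageLimit is the plugin option referenced by the new warning, and 444 is just an example value):

module.exports = {
  plugins: [
    {
      resolve: `gatsby-source-contentful`,
      options: {
        spaceId: `your-space-id`,
        accessToken: `your-access-token`,
        // Optional: lower the sync page size if the Contentful API reports
        // "Response size too big" for your space (the default is now 1000, up from 100).
        pageLimit: 444,
      },
    },
  ],
}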
140 changes: 140 additions & 0 deletions packages/gatsby-source-contentful/src/__tests__/fetch-backoff.js
@@ -0,0 +1,140 @@
/**
* @jest-environment node
*/

import nock from "nock"
import fetchData from "../fetch"
import { createPluginConfig } from "../plugin-options"

const host = `localhost`
const options = {
  spaceId: `12345`,
  accessToken: `67890`,
  host,
  contentfulClientConfig: {
    retryLimit: 2,
  },
}

const baseURI = `https://${host}`

const start = jest.fn()
const end = jest.fn()
const mockActivity = {
  start,
  end,
  tick: jest.fn(),
  done: end,
}

const reporter = {
  info: jest.fn(),
  verbose: jest.fn(),
  warn: jest.fn(),
  panic: jest.fn(e => {
    throw e
  }),
  activityTimer: jest.fn(() => mockActivity),
  createProgress: jest.fn(() => mockActivity),
}

const pluginConfig = createPluginConfig(options)

describe(`fetch-backoff`, () => {
  afterEach(() => {
    nock.cleanAll()
    reporter.verbose.mockClear()
    reporter.panic.mockClear()
    reporter.warn.mockClear()
  })

  test(`backs off page limit when limit is reached`, async () => {
    jest.setTimeout(30000)
    const scope = nock(baseURI)
      // Space
      .get(`/spaces/${options.spaceId}/`)
      .reply(200, { items: [] })
      // Locales
      .get(`/spaces/${options.spaceId}/environments/master/locales`)
      .reply(200, { items: [{ code: `en`, default: true }] })
      // Sync with 1000 (too much)
      .get(
        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=1000`
      )
      .times(1)
      .reply(400, {
        sys: { type: `Error`, id: `BadRequest` },
        message: `Response size too big. Maximum allowed response size: 512000B.`,
        requestId: `12345`,
      })
      // Sync with 666 (still too much)
      .get(
        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=666`
      )
      .times(1)
      .reply(400, {
        sys: { type: `Error`, id: `BadRequest` },
        message: `Response size too big. Maximum allowed response size: 512000B.`,
        requestId: `12345`,
      })
      // Sync with 444
      .get(
        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=444`
      )
      .reply(200, { items: [] })
      // Content types
      .get(
        `/spaces/${options.spaceId}/environments/master/content_types?skip=0&limit=1000&order=sys.createdAt`
      )
      .reply(200, { items: [] })

    await fetchData({ pluginConfig, reporter })

    expect(reporter.panic).not.toBeCalled()
    expect(reporter.warn.mock.calls).toMatchInlineSnapshot(`
      Array [
        Array [
          "The sync with Contentful failed using pageLimit 1000 as the response size limit of the API is exceeded.
      Retrying sync with pageLimit of 666",
        ],
        Array [
          "The sync with Contentful failed using pageLimit 666 as the response size limit of the API is exceeded.
      Retrying sync with pageLimit of 444",
        ],
        Array [
          "We recommend setting your pageLimit in gatsby-config.js to 444 to avoid failed synchronizations.",
        ],
      ]
    `)
    expect(scope.isDone()).toBeTruthy()
  })

  test(`does not back off page limit when limit is not reached`, async () => {
    jest.setTimeout(30000)
    const scope = nock(baseURI)
      // Space
      .get(`/spaces/${options.spaceId}/`)
      .reply(200, { items: [] })
      // Locales
      .get(`/spaces/${options.spaceId}/environments/master/locales`)
      .reply(200, { items: [{ code: `en`, default: true }] })
      // Sync with 1000 (no limit exceeded)
      .get(
        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=1000`
      )
      .reply(200, { items: [] })
      // Content types
      .get(
        `/spaces/${options.spaceId}/environments/master/content_types?skip=0&limit=1000&order=sys.createdAt`
      )
      .reply(200, { items: [] })

    await fetchData({ pluginConfig, reporter })

    expect(reporter.panic).not.toBeCalled()
    expect(reporter.warn).not.toBeCalled()
    expect(scope.isDone()).toBeTruthy()
  })
})
packages/gatsby-source-contentful/src/__tests__/fetch-retry.js
@@ -57,17 +57,17 @@ describe(`fetch-retry`, () => {
      .reply(200, { items: [{ code: `en`, default: true }] })
      // Sync
      .get(
-        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=100`
+        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=1000`
      )
      .times(1)
      .replyWithError({ code: `ETIMEDOUT` })
      .get(
-        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=100`
+        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=1000`
      )
      .reply(200, { items: [] })
      // Content types
      .get(
-        `/spaces/${options.spaceId}/environments/master/content_types?skip=0&limit=100&order=sys.createdAt`
+        `/spaces/${options.spaceId}/environments/master/content_types?skip=0&limit=1000&order=sys.createdAt`
      )
      .reply(200, { items: [] })

@@ -90,7 +90,7 @@ describe(`fetch-retry`, () => {
      .reply(200, { items: [{ code: `en`, default: true }] })
      // Sync
      .get(
-        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=100`
+        `/spaces/${options.spaceId}/environments/master/sync?initial=true&limit=1000`
      )
      .times(3)
      .reply(
2 changes: 1 addition & 1 deletion packages/gatsby-source-contentful/src/__tests__/fetch.js
@@ -157,7 +157,7 @@ it(`calls contentful.getContentTypes with default page limit`, async () => {

  expect(reporter.panic).not.toBeCalled()
  expect(mockClient.getContentTypes).toHaveBeenCalledWith({
-    limit: 100,
+    limit: 1000,
    order: `sys.createdAt`,
    skip: 0,
  })
53 changes: 43 additions & 10 deletions packages/gatsby-source-contentful/src/fetch.js
@@ -196,22 +196,55 @@ ${formatPluginOptionsForCLI(pluginConfig.getOriginalPluginOptions(), errors)}`,
  }

  let currentSyncData
-  const basicSyncConfig = {
-    limit: pageLimit,
-    resolveLinks: false,
-  }
+  let currentPageLimit = pageLimit
+  let lastCurrentPageLimit
+  let syncSuccess = false
  try {
    syncProgress = reporter.createProgress(
      `Contentful: ${syncToken ? `Sync changed items` : `Sync all items`}`,
-      pageLimit,
+      currentPageLimit,
      0
    )
    syncProgress.start()
-    reporter.verbose(`Contentful: Sync ${pageLimit} items per page.`)
-    let query = syncToken
-      ? { nextSyncToken: syncToken, ...basicSyncConfig }
-      : { initial: true, ...basicSyncConfig }
-    currentSyncData = await client.sync(query)
+    reporter.verbose(`Contentful: Sync ${currentPageLimit} items per page.`)
+
+    while (!syncSuccess) {
+      try {
+        const basicSyncConfig = {
+          limit: currentPageLimit,
+          resolveLinks: false,
+        }
+        const query = syncToken
+          ? { nextSyncToken: syncToken, ...basicSyncConfig }
+          : { initial: true, ...basicSyncConfig }
+        currentSyncData = await client.sync(query)
+        syncSuccess = true
+      } catch (e) {
+        // Back off the page limit if the response content length exceeds Contentful's limits.
+        if (
+          e.response?.data?.message.includes(`Response size too big`) &&
+          currentPageLimit > 1
+        ) {
+          lastCurrentPageLimit = currentPageLimit
+          // Reduce the page limit by an arbitrary 1/3 of the current limit to ensure
+          // new and bigger entries are synced without exceeding the response size limit.
+          currentPageLimit = Math.floor((currentPageLimit / 3) * 2) || 1
+          reporter.warn(
+            [
+              `The sync with Contentful failed using pageLimit ${lastCurrentPageLimit} as the response size limit of the API is exceeded.`,
+              `Retrying sync with pageLimit of ${currentPageLimit}`,
+            ].join(`\n\n`)
+          )
+          continue
+        }
+        throw e
+      }
+      if (currentPageLimit !== pageLimit) {
+        reporter.warn(
+          `We recommend setting your pageLimit in gatsby-config.js to ${currentPageLimit} to avoid failed synchronizations.`
+        )
+      }
+    }
  } catch (e) {
    reporter.panic({
      id: CODES.SyncError,
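
A note on the backoff step above: each retry keeps roughly two thirds of the previous page limit via Math.floor((currentPageLimit / 3) * 2) || 1, which is why the new test expects the sequence 1000 → 666 → 444. A small sketch of that arithmetic (the backoffSequence helper is illustrative only, not part of the plugin):

// Illustrative only: reproduce the page-limit backoff sequence used in fetch.js.
const backoffSequence = startLimit => {
  const steps = [startLimit]
  let limit = startLimit
  while (limit > 1) {
    limit = Math.floor((limit / 3) * 2) || 1
    steps.push(limit)
  }
  return steps
}

// backoffSequence(1000) begins with [1000, 666, 444, 296, ...] and bottoms out at 1.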
2 changes: 1 addition & 1 deletion packages/gatsby-source-contentful/src/plugin-options.js
@@ -2,7 +2,7 @@ const chalk = require(`chalk`)

const _ = require(`lodash`)

-const DEFAULT_PAGE_LIMIT = 100
+const DEFAULT_PAGE_LIMIT = 1000

const defaultOptions = {
  host: `cdn.contentful.com`,
