Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
awxiaoxian2020 committed Oct 11, 2023
1 parent efc6b4d commit 03e2c5d
Showing 1 changed file with 72 additions and 12 deletions.
84 changes: 72 additions & 12 deletions src/utilities/fetch-supporters.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import fs from 'fs';
import path from 'path';
import { promisify } from 'util';
import fetch from 'node-fetch';
import lodash from 'lodash';
import { fileURLToPath } from 'url';

Expand All @@ -23,7 +22,22 @@ const absoluteFilename = path.resolve(
filename
);

const graphqlEndpoint = 'https://api.opencollective.com/graphql/v2';
let graphqlEndpoint = 'https://api.opencollective.com/graphql/v2';

// Switch to the authenticated endpoint when a personal access token is
// configured via the OPENCOLLECTIVE_API_KEY environment variable.
if (process.env.OPENCOLLECTIVE_API_KEY) {
  // rate limit is 100 requests per minute with personal access token
  // rate limit is 10 requests per minute without personal access token
  console.log(
    'Using personal access token to fetch supporters from OpenCollective'
  );
  // By default a personal access token of @chenxsan was used, as the author
  // did not have access to the webpack organization's token.
  // @doc https://graphql-docs-v2.opencollective.com/access#with-a-personal-token
  graphqlEndpoint = `https://api.opencollective.com/graphql/v2?personalToken=${process.env.OPENCOLLECTIVE_API_KEY}`;
} else {
  // No token configured: fall back to the public (anonymous) API, which has
  // a much lower rate limit — see the note above.
  console.log(
    'No personal access token found, using public API to fetch supporters from OpenCollective'
  );
}

// https://github.com/opencollective/opencollective-api/blob/master/server/graphql/v2/query/TransactionsQuery.ts#L81
const graphqlPageSize = 1000;
Expand Down Expand Up @@ -91,26 +105,72 @@ const getAllNodes = async (graphqlQuery, getNodes) => {

let allNodes = [];

let limit = 10,
remaining = 10,
reset;
if (process.env.OPENCOLLECTIVE_API_KEY) {
limit = 100;
remaining = 100;
}
// Handling pagination if necessary
// eslint-disable-next-line
while (true) {
if (remaining === 0) {
console.log(`Rate limit exceeded. Sleeping until ${new Date(reset)}.`);
await new Promise((resolve) =>
setTimeout(resolve, reset - Date.now() + 100)
);
}
const result = await fetch(graphqlEndpoint, {
method: 'POST',
body: JSON.stringify(body),
headers: {
'Content-Type': 'application/json',
},
}).then((response) => response.json());
console.log(result);
if (result.errors) throw new Error(result.errors[0].message);
const nodes = getNodes(result.data);
allNodes = [...allNodes, ...nodes];
body.variables.offset += graphqlPageSize;
if (nodes.length < graphqlPageSize) {
return allNodes;
}).then(async (response) => {
if (response.headers.get('content-type').includes('json')) {
const json = await response.json();
console.log('json', json);
if (json.error) {
// when rate limit exceeded, api won't return headers data like x-ratelimit-limit, etc.
remaining = 0;
reset = Date.now() + 1000 * 60; // 1 minute
} else {
limit = response.headers.get('x-ratelimit-limit') * 1;
remaining = response.headers.get('x-ratelimit-remaining') * 1;
reset = response.headers.get('x-ratelimit-reset') * 1000;
console.log(
`Rate limit: ${remaining}/${limit} remaining. Reset in ${new Date(
reset
)}`
);
}
return json;
} else {
// utilities/fetch-supporters: SyntaxError: Unexpected token < in JSON at position 0
console.log('something wrong when fetching supporters');
return {
error: {
message: await response.text(),
},
};
}
});
// when rate limit exceeded, api will return {error: {message: ''}}
    // but we could hopefully avoid the rate limit by sleeping at the beginning of the loop
    // however, when there are multiple tasks running simultaneously, it's still possible to hit the rate limit
if (result.error) {
console.log('error', result.error);
// let the loop continue
} else {
// sleep for a while
await new Promise((resolve) => setTimeout(resolve, 6000));
const nodes = getNodes(result.data);
allNodes = [...allNodes, ...nodes];
body.variables.offset += graphqlPageSize;
if (nodes.length < graphqlPageSize) {
return allNodes;
} else {
// more nodes to fetch
}
}
}
};
Expand Down

0 comments on commit 03e2c5d

Please sign in to comment.