diff --git a/Gruntfile.js b/Gruntfile.js index 2f0d5584a5..8c9d3b9eb8 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -38,6 +38,7 @@ const packageJSON = require('./package.json') const { CI = '', LIGHTWEIGHT_CLIENT = 'true', + MAX_EVENTS_AFTER = '', NODE_ENV = 'development', EXPOSE_SBP = '' } = process.env @@ -204,6 +205,7 @@ module.exports = (grunt) => { 'process.env.CONTRACTS_VERSION': `'${CONTRACTS_VERSION}'`, 'process.env.GI_VERSION': `'${GI_VERSION}'`, 'process.env.LIGHTWEIGHT_CLIENT': `'${LIGHTWEIGHT_CLIENT}'`, + 'process.env.MAX_EVENTS_AFTER': `'${MAX_EVENTS_AFTER}'`, 'process.env.NODE_ENV': `'${NODE_ENV}'`, 'process.env.EXPOSE_SBP': `'${EXPOSE_SBP}'` }, diff --git a/backend/database.js b/backend/database.js index e045bb142c..669d6fa2d2 100644 --- a/backend/database.js +++ b/backend/database.js @@ -9,9 +9,11 @@ import path from 'node:path' import '@sbp/okturtles.data' import { checkKey, parsePrefixableKey, prefixHandlers } from '~/shared/domains/chelonia/db.js' import LRU from 'lru-cache' +import { createCircularList } from '~/shared/circularList.js' const Boom = require('@hapi/boom') +const MAX_EVENTS_AFTER = Number.parseInt(process.env.MAX_EVENTS_AFTER, 10) const production = process.env.NODE_ENV === 'production' // Defaults to `fs` in production. const persistence = process.env.GI_PERSIST || (production ? 
'fs' : undefined) @@ -43,6 +45,38 @@ sbp('sbp/selectors/register', { throw Boom.notFound(`contractID ${contractID} doesn't exist!`) } let prefix = '[' + if (MAX_EVENTS_AFTER) { + const circularList = createCircularList(MAX_EVENTS_AFTER, undefined) + while (currentHEAD !== hash) { + const entry = await sbp('chelonia/db/getEntry', currentHEAD) + currentHEAD = entry.message().previousHEAD + circularList.add(entry) + } + const entry = await sbp('chelonia/db/getEntry', currentHEAD) + circularList.add(entry) + + const entries = circularList.toArray() + let i = 0 + return new Readable({ + read (): any { + try { + const entry = entries[i++] + if (entry) { + const json = `"${strToB64(entry.serialize())}"` + this.push(prefix + json) + prefix = ',' + } else { + this.push(prefix === ',' ? ']' : '[]') + this.push(null) + } + } catch (e) { + console.error(`read(): ${e.message}:`, e) + this.push(prefix === ',' ? ']' : '[]') + this.push(null) + } + } + }) + } // NOTE: if this ever stops working you can also try Readable.from(): // https://nodejs.org/api/stream.html#stream_stream_readable_from_iterable_options return new Readable({ diff --git a/frontend/model/captureLogs.js b/frontend/model/captureLogs.js index f7fbfd0095..70b1e0afd5 100644 --- a/frontend/model/captureLogs.js +++ b/frontend/model/captureLogs.js @@ -1,6 +1,6 @@ import sbp from '@sbp/sbp' import { CAPTURED_LOGS, SET_APP_LOGS_FILTER } from '~/frontend/utils/events.js' -import { createCircularList } from '~/frontend/utils/circularList.js' +import { createCircularList } from '~/shared/circularList.js' /* - giConsole/[username]/entries - the stored log entries. 
diff --git a/frontend/utils/circularList.js b/shared/circularList.js similarity index 100% rename from frontend/utils/circularList.js rename to shared/circularList.js diff --git a/shared/domains/chelonia/chelonia.js b/shared/domains/chelonia/chelonia.js index 7506cfff5e..055451b70f 100644 --- a/shared/domains/chelonia/chelonia.js +++ b/shared/domains/chelonia/chelonia.js @@ -14,6 +14,8 @@ import { GIMessage } from './GIMessage.js' import { ChelErrorUnrecoverable } from './errors.js' import type { GIOpContract, GIOpActionUnencrypted } from './GIMessage.js' +const MAX_EVENTS_AFTER = Number.parseInt(process.env.MAX_EVENTS_AFTER, 10) + // TODO: define ChelContractType for /defineContract export type ChelRegParams = { @@ -320,9 +322,28 @@ export default (sbp('sbp/selectors/register', { // the events one-by-one instead of converting to giant json object? // however, note if we do that they would be processed in reverse... 'chelonia/out/eventsAfter': async function (contractID: string, since: string) { - const events = await fetch(`${this.config.connectionURL}/eventsAfter/${contractID}/${since}`) + let events = await fetch(`${this.config.connectionURL}/eventsAfter/${contractID}/${since}`) .then(handleFetchResult('json')) if (Array.isArray(events)) { + // Sanity check + if (GIMessage.deserialize(b64ToStr(events[events.length - 1])).hash() !== since) { + throw new Error('hash() !== since') + } + // Maybe we didn't receive all the requested events because of eventsAfter's limit. + if (MAX_EVENTS_AFTER && (events.length === MAX_EVENTS_AFTER)) { + while (true) { + const intermediateEventHash = GIMessage.deserialize(b64ToStr(events[0])).hash() + const nextEvents = await fetch(`${this.config.connectionURL}/eventsAfter/${contractID}/${intermediateEventHash}`) + .then(handleFetchResult('json')) + // Break if we didn't receive any event we didn't have yet. + // Note: nextEvents usually ends with an intermediate event we already have. 
+ if (!Array.isArray(nextEvents) || nextEvents.length < 2) break + nextEvents.pop() + events = [...nextEvents, ...events] + // Only continue if we hit the limit again. + if (nextEvents.length !== MAX_EVENTS_AFTER - 1) break + } + } return events.reverse().map(b64ToStr) } },