New: Support automatic scraping periodically (#613)
I added an option in the settings to have the scraper run automatically every few hours.
It looks like this:
<img width="487" alt="Screenshot 2024-11-01 at 22 57 09" src="https://github.com/user-attachments/assets/c456a632-9067-45f2-a61b-06c23fa0b3b6">

Clicking the הפעל (Run) button resets the automatic-run timer.
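For reference, a minimal sketch of the scraping section of the config with the new field (only `periodicScrapingIntervalHours` is introduced by this commit; the other values are hypothetical):

```ts
// Sketch only – field names follow packages/main/src/backend/commonTypes.ts.
const scrapingConfig = {
  numDaysBack: 40, // hypothetical existing value
  timeout: 60_000, // hypothetical existing value
  // New: re-run the scraper automatically every 8 hours; omit (or set 0) to disable.
  periodicScrapingIntervalHours: 8,
};
```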
brafdlog authored Nov 10, 2024
2 parents eaefde7 + 55b0148 commit d89dd7c
Showing 9 changed files with 92 additions and 18 deletions.
1 change: 1 addition & 0 deletions packages/main/src/backend/commonTypes.ts
@@ -24,6 +24,7 @@ export interface Config {
chromiumPath?: string;
maxConcurrency?: number;
timeout: number;
periodicScrapingIntervalHours?: number;
};
useReactUI?: boolean;
}
13 changes: 11 additions & 2 deletions packages/main/src/backend/eventEmitters/EventEmitter.ts
@@ -1,7 +1,7 @@
// eslint-disable-next-line max-classes-per-file
import { type EnrichedTransaction, type OutputVendorName } from '@/backend/commonTypes';
import Emittery from 'emittery';
import { type CompanyTypes } from 'israeli-bank-scrapers-core';
import type { EnrichedTransaction, OutputVendorName } from '../commonTypes';

export enum EventNames {
IMPORT_PROCESS_START = 'IMPORT_PROCESS_START',
@@ -115,6 +115,15 @@ export class ExporterEndEvent extends ExporterEvent {
}
}

export class ImportStartEvent extends BudgetTrackingEvent {
nextAutomaticScrapeDate?: Date | null;

constructor(message: string, nextAutomaticScrapeDate?: Date | null) {
super({ message });
this.nextAutomaticScrapeDate = nextAutomaticScrapeDate;
}
}

export class DownalodChromeEvent extends BudgetTrackingEvent {
percent: number;

@@ -125,7 +134,7 @@ export class DownalodChromeEvent extends BudgetTrackingEvent {
}

export interface EventDataMap {
[EventNames.IMPORT_PROCESS_START]: BudgetTrackingEvent;
[EventNames.IMPORT_PROCESS_START]: ImportStartEvent;
[EventNames.DOWNLOAD_CHROME]: DownalodChromeEvent;
[EventNames.IMPORTER_START]: ImporterEvent;
[EventNames.IMPORTER_PROGRESS]: ImporterEvent;
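A rough sketch of how a consumer of these events can read the new field, assuming `BudgetTrackingEventEmitter` is the Emittery-based emitter defined in this file (it mirrors what `ConfigStore.handleScrapingEvent` does further down):

```ts
import {
  BudgetTrackingEventEmitter,
  EventNames,
  type ImportStartEvent,
} from '@/backend/eventEmitters/EventEmitter';

const emitter = new BudgetTrackingEventEmitter();

// EventDataMap now maps IMPORT_PROCESS_START to ImportStartEvent, so the payload
// carries nextAutomaticScrapeDate (null/undefined when periodic scraping is off).
emitter.on(EventNames.IMPORT_PROCESS_START, (event: ImportStartEvent) => {
  if (event.nextAutomaticScrapeDate) {
    console.log(`Next automatic scrape: ${new Date(event.nextAutomaticScrapeDate).toLocaleString()}`);
  }
});
```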
46 changes: 37 additions & 9 deletions packages/main/src/backend/index.ts
@@ -4,42 +4,70 @@ import { scrapeFinancialAccountsAndFetchTransactions } from '@/backend/import/im
import moment from 'moment';
import * as configManager from './configManager/configManager';
import * as Events from './eventEmitters/EventEmitter';
import { EventNames } from './eventEmitters/EventEmitter';
import outputVendors from './export/outputVendors';
import * as bankScraper from './import/bankScraper';
import logger from '../logging/logger';

export { CompanyTypes } from 'israeli-bank-scrapers-core';
export { Events, configManager, outputVendors };

export const { inputVendors } = bankScraper;
let intervalId: NodeJS.Timeout | null = null;

export async function setPeriodicScrapingIfNeeded(config: Config, optionalEventPublisher?: Events.EventPublisher) {
const hoursInterval = config.scraping.periodicScrapingIntervalHours;
optionalEventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();

stopPeriodicScraping();

if (hoursInterval) {
await optionalEventPublisher.emit(EventNames.LOG, {
message: `Setting up periodic scraping every ${hoursInterval} hours`,
});
intervalId = setInterval(
async () => {
await scrapeAndUpdateOutputVendors(config, optionalEventPublisher);
},
hoursInterval * 1000 * 60 * 60,
);
}
}

export function stopPeriodicScraping() {
if (intervalId) {
clearInterval(intervalId);
}
}

export async function scrapeAndUpdateOutputVendors(config: Config, optionalEventPublisher?: Events.EventPublisher) {
const eventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();

const startDate = moment().subtract(config.scraping.numDaysBack, 'days').startOf('day').toDate();

await eventPublisher.emit(Events.EventNames.IMPORT_PROCESS_START, {
message: `Starting to scrape from ${startDate} to today`,
});
const nextAutomaticScrapeDate: Date | null = config.scraping.periodicScrapingIntervalHours
? moment().add(config.scraping.periodicScrapingIntervalHours, 'hours').toDate()
: null;

await eventPublisher.emit(
EventNames.IMPORT_PROCESS_START,
new Events.ImportStartEvent(`Starting to scrape from ${startDate} to today`, nextAutomaticScrapeDate),
);

const companyIdToTransactions = await scrapeFinancialAccountsAndFetchTransactions(
config.scraping,
startDate,
eventPublisher,
);
try {
const executionResult = await createTransactionsInExternalVendors(
return await createTransactionsInExternalVendors(
config.outputVendors,
companyIdToTransactions,
startDate,
eventPublisher,
);

return executionResult;
} catch (e) {
logger.error('Failed to create transactions in external vendors', e);
await eventPublisher.emit(
Events.EventNames.GENERAL_ERROR,
EventNames.GENERAL_ERROR,
new Events.BudgetTrackingEvent({
message: (e as Error).message,
error: e as Error,
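A minimal usage sketch of the new backend functions, assuming `configManager.getConfig` resolves the same `Config` used in `handlers/index.ts` below:

```ts
import {
  Events,
  configManager,
  scrapeAndUpdateOutputVendors,
  setPeriodicScrapingIfNeeded,
  stopPeriodicScraping,
} from '@/backend';

async function runWithPeriodicScraping() {
  const config = await configManager.getConfig();
  const eventPublisher = new Events.BudgetTrackingEventEmitter();

  // (Re)arms the interval when periodicScrapingIntervalHours is set,
  // clearing any previously scheduled run first.
  await setPeriodicScrapingIfNeeded(config, eventPublisher);

  // Manual run; with the interval set, the emitted ImportStartEvent carries nextAutomaticScrapeDate.
  await scrapeAndUpdateOutputVendors(config, eventPublisher);
}

// Later, e.g. when the user turns the setting off:
stopPeriodicScraping();
```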
6 changes: 4 additions & 2 deletions packages/main/src/handlers/index.ts
@@ -1,5 +1,5 @@
import { App } from '@/app-globals';
import { scrapeAndUpdateOutputVendors } from '@/backend';
import { scrapeAndUpdateOutputVendors, setPeriodicScrapingIfNeeded, stopPeriodicScraping } from '@/backend';
import { type Credentials } from '@/backend/commonTypes';
import { getConfig } from '@/backend/configManager/configManager';
import { BudgetTrackingEventEmitter } from '@/backend/eventEmitters/EventEmitter';
@@ -33,6 +33,7 @@ const functions: Record<string, Listener> = {
updateConfig: updateConfigHandler as Listener<void>,
getYnabAccountData,
getLogsInfo: getLogsInfoHandler,
stopPeriodicScraping,
getAppInfo: async () => {
return {
sourceCommitShort: import.meta.env.VITE_SOURCE_COMMIT_SHORT,
@@ -67,10 +68,11 @@ export const registerHandlers = () => {
ipcMain.on('scrape', async (event: IpcMainEvent) => {
const config = await getConfig();
const eventSubscriber = new BudgetTrackingEventEmitter();
scrapeAndUpdateOutputVendors(config, eventSubscriber);
eventSubscriber.onAny((eventName, eventData) => {
event.reply('scrapingProgress', JSON.stringify({ eventName, eventData }));
});
await setPeriodicScrapingIfNeeded(config, eventSubscriber);
await scrapeAndUpdateOutputVendors(config, eventSubscriber);
});

ipcMain.removeAllListeners('getYnabAccountData');
4 changes: 2 additions & 2 deletions packages/preload/src/eventsBridge.ts
@@ -37,8 +37,8 @@ export async function scrape(handleScrapingEvent: HandleScrapingEvent) {
}
}

export async function toggleUIVersion() {
await electron.ipcRenderer.send('toggleUiVersion');
export async function stopPeriodicScraping() {
return electron.ipcRenderer.invoke('stopPeriodicScraping');
}

export async function openExternal(url: string) {
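On the renderer side the new bridge can be called like the other eventsBridge helpers; the import specifier below is a placeholder, since how the renderer resolves the preload bridge is not shown in this diff:

```ts
// Hypothetical import path – use whatever alias the renderer already uses for eventsBridge.
import { stopPeriodicScraping } from 'preload/eventsBridge';

async function disableAutomaticScraping() {
  // Asks the main process to clear the periodic scraping interval (see handlers/index.ts above).
  await stopPeriodicScraping();
}
```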
8 changes: 8 additions & 0 deletions packages/renderer/src/components/Body.tsx
@@ -68,6 +68,13 @@ const Body = () => {
closeModal();
};

const shouldShowNextRunTime = !!(
configStore.nextAutomaticScrapeDate && Number(configStore.config.scraping.periodicScrapingIntervalHours)
);
const nextRunTimeString = configStore.nextAutomaticScrapeDate
? new Date(configStore.nextAutomaticScrapeDate).toLocaleTimeString()
: null;

return (
<Container className={styles.root}>
<Container className={styles.container}>
@@ -124,6 +131,7 @@
<Button variant="dark" size="lg" onClick={cleanAndScrape} disabled={configStore.isScraping}>
הפעל
</Button>
{shouldShowNextRunTime && <h6>ריצה הבאה: {nextRunTimeString}</h6>}
<Image
src={settingsIcon}
onClick={() => showModal({} as Account, ModalStatus.GENERAL_SETTINGS)}
12 changes: 12 additions & 0 deletions packages/renderer/src/components/GeneralSettings.tsx
@@ -17,6 +17,10 @@ function GeneralSettings() {
}
};

const handlePeriodicScrapingIntervalHoursChanged = (interval: string) => {
configStore.setPeriodicScrapingIntervalHours(Number(interval));
};

return (
<div className={styles.container}>
<Card className={styles.card}>
@@ -61,6 +65,14 @@
onBlur={(event) => handleTimeoutChanged(event.target.value)}
/>
</Form.Group>
<Form.Group>
<Form.Label>לרוץ אוטומטית כל X שעות</Form.Label>
<Form.Control
className={styles.input}
defaultValue={configStore.config?.scraping.periodicScrapingIntervalHours}
onBlur={(event) => handlePeriodicScrapingIntervalHoursChanged(event.target.value)}
/>
</Form.Group>
</Form>
</Card.Body>
</Card>
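One behavior worth noting in the handler above: the raw input string goes through `Number`, so an empty or non-numeric value effectively disables periodic scraping (a sketch of the assumed behavior, based on the `if (hoursInterval)` check in setPeriodicScrapingIfNeeded):

```ts
// How the interval field is interpreted once converted with Number():
Number('8');   // 8   – a timer is armed to scrape every 8 hours
Number('');    // 0   – falsy, so no timer is armed and the next-run label is hidden
Number('abc'); // NaN – also falsy, same effect as an empty value
```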
19 changes: 16 additions & 3 deletions packages/renderer/src/store/ConfigStore.tsx
@@ -17,6 +17,7 @@ import {
type Log,
type OutputVendorName,
} from '../types';
import { type ImportStartEvent } from '../../../main/src/backend/eventEmitters/EventEmitter';

interface AccountScrapingData {
logs: Log[];
@@ -71,6 +72,7 @@ export class ConfigStore {
config: Config;

chromeDownloadPercent = 0;
nextAutomaticScrapeDate?: Date | null;

// TODO: move this to a separate store
accountScrapingData: Map<CompanyTypes | OutputVendorName, AccountScrapingData>;
@@ -130,6 +132,7 @@ export class ConfigStore {
clearScrapingStatus() {
this.accountScrapingData = new Map();
this.updateChromeDownloadPercent(0);
this.nextAutomaticScrapeDate = null;
}

updateChromeDownloadPercent(percent: number) {
@@ -152,10 +155,13 @@
}

handleScrapingEvent(eventName: string, budgetTrackingEvent?: BudgetTrackingEvent) {
if (eventName === 'DOWNLOAD_CHROME') {
this.updateChromeDownloadPercent((budgetTrackingEvent as DownloadChromeEvent)?.percent);
}
if (budgetTrackingEvent) {
if (eventName === 'DOWNLOAD_CHROME') {
this.updateChromeDownloadPercent((budgetTrackingEvent as DownloadChromeEvent)?.percent);
}
if (eventName === 'IMPORT_PROCESS_START') {
this.nextAutomaticScrapeDate = (budgetTrackingEvent as ImportStartEvent).nextAutomaticScrapeDate;
}
const accountId = budgetTrackingEvent.vendorId;
if (accountId) {
if (!this.accountScrapingData.has(accountId)) {
@@ -224,6 +230,13 @@ export class ConfigStore {
async setChromiumPath(chromiumPath?: string) {
this.config.scraping.chromiumPath = chromiumPath;
}

setPeriodicScrapingIntervalHours(interval?: number) {
this.config.scraping.periodicScrapingIntervalHours = interval;
if (!interval || interval <= 0) {
this.nextAutomaticScrapeDate = null;
}
}
}

export const configStore = new ConfigStore();
1 change: 1 addition & 0 deletions packages/renderer/src/types.tsx
Original file line number Diff line number Diff line change
@@ -30,6 +30,7 @@ export interface Config {
accountsToScrape: AccountToScrapeConfig[];
chromiumPath?: string;
maxConcurrency?: number;
periodicScrapingIntervalHours?: number;
};
}

