Run Scraper and Commit Results #11

Workflow file for this run

name: Run Scraper and Commit Results

on:
  # schedule:
  #   # Runs every day at midnight UTC (you can adjust the schedule as needed)
  #   - cron: '0 0 * * *'
  workflow_dispatch:
    # Allows manual triggering of the workflow from the GitHub Actions UI

jobs:
  run-scraper:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      # Step 1: Checkout the repository
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Step 2: Set up Python
      - name: Set up Python 3.x
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      # Step 3: Install dependencies
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      # Step 4: Get the date for the previous day
      - name: Set date variable for yesterday
        run: echo "YESTERDAY=$(date -d 'yesterday' +'%Y-%m-%d')" >> $GITHUB_ENV

      # Step 5: Run the scraper for the previous day
      - name: Run the scraper for yesterday
        run: |
          python news_scraper.py "$YESTERDAY"
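
      # git-auto-commit-action only creates a commit when the working tree has
      # changes, so a run that produces no new files in raw_extractions is a no-op.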
      # Step 6: Commit and push changes if there are any new files in raw_extractions
      - name: Commit and push changes
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "Auto-update: Scraped data for ${{ env.YESTERDAY }}"
          branch: ${{ github.head_ref }}
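
The workflow assumes news_scraper.py accepts a single YYYY-MM-DD argument and writes its output under raw_extractions/, the directory the auto-commit step watches. A minimal sketch of such an entry point follows; the feed URL, the JSON response shape, and the per-day output filename are assumptions for illustration, not details taken from this repository.

import json
import sys
from datetime import datetime
from pathlib import Path

import requests  # assumed to be one of the packages in requirements.txt

# Placeholder endpoint; the real source is whatever the actual scraper targets.
FEED_URL = "https://example.com/news"


def scrape(day: str) -> list:
    """Fetch articles published on the given day (YYYY-MM-DD)."""
    response = requests.get(FEED_URL, params={"date": day}, timeout=30)
    response.raise_for_status()
    return response.json()


def main() -> None:
    if len(sys.argv) != 2:
        sys.exit("usage: python news_scraper.py YYYY-MM-DD")
    day = sys.argv[1]
    datetime.strptime(day, "%Y-%m-%d")  # fail fast on a malformed date

    articles = scrape(day)

    # Step 6 commits new files under raw_extractions/, so write one file per day there.
    out_dir = Path("raw_extractions")
    out_dir.mkdir(exist_ok=True)
    out_path = out_dir / f"{day}.json"
    out_path.write_text(json.dumps(articles, ensure_ascii=False, indent=2))
    print(f"Wrote {len(articles)} records to {out_path}")


if __name__ == "__main__":
    main()

Run locally as python news_scraper.py 2024-01-01 to produce raw_extractions/2024-01-01.json, which is the kind of file the auto-commit step would then pick up and push.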