JPR Election Results Scraper #339

Workflow file for this run

# This workflow installs Python dependencies, sets environment variables and runs the scraper
name: JPR Election Results Scraper
on:
  # schedule:
  #   - cron: '3,33 * * * *' # <-- Set your cron here (UTC). GitHub's scheduler can run ~2-10 minutes late.
  workflow_dispatch:
permissions:
  contents: write
jobs:
  build:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    env:
      DATAWRAPPER_API_KEY: ${{ secrets.DATAWRAPPER_API_KEY }}
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m ensurepip --upgrade
          python -m pip install --upgrade pip
          pip install datawrapper requests flake8 pandas
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Run the scraper
        run: python JPRscraper.py
      - name: Commit and push to main branch
        run: |
          git config user.name "Automated"
          git config user.email "actions@users.noreply.github.com"
          git add -A
          timestamp=$(date -u)
          git commit -m "Updated at: ${timestamp}" || exit 0
          git push origin main
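
For context, the "Run the scraper" step assumes that JPRscraper.py reads the DATAWRAPPER_API_KEY environment variable set above and talks to Datawrapper through the datawrapper package installed earlier in the job. The sketch below is a hypothetical illustration of such a script, not the repository's actual code: the results URL, chart ID, and output filename are placeholder assumptions.

# Hypothetical sketch of a JPRscraper.py-style script; the URL, chart ID,
# and output filename are placeholders, not values from the real scraper.
import os

import pandas as pd
import requests
from datawrapper import Datawrapper

RESULTS_URL = "https://example.com/election-results.json"  # placeholder source
CHART_ID = "abc123"  # placeholder Datawrapper chart ID


def main():
    # The workflow exposes the secret as the DATAWRAPPER_API_KEY env var.
    api_key = os.environ["DATAWRAPPER_API_KEY"]
    dw = Datawrapper(access_token=api_key)

    # Fetch the latest results and normalize them into a DataFrame.
    response = requests.get(RESULTS_URL, timeout=30)
    response.raise_for_status()
    df = pd.DataFrame(response.json())

    # Push the new data to the chart and republish it.
    dw.add_data(CHART_ID, data=df)
    dw.publish_chart(CHART_ID)

    # Write a local copy so the workflow's commit-and-push step has a file to commit.
    df.to_csv("results.csv", index=False)


if __name__ == "__main__":
    main()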