Add Python web scraper for NJC travel rates with currency extraction

- Implemented Python scraper using BeautifulSoup and pandas to automatically collect travel rates from official NJC website
- Added currency extraction from table titles (supports EUR, USD, AUD, CAD, ARS, etc.)
- Added country extraction from table titles for international rates
- Flattened pandas MultiIndex columns for a cleaner data structure
- Default to CAD for domestic Canadian sources (accommodations and domestic tables)
- Created SQLite database schema (raw_tables, rate_entries, exchange_rates, accommodations)
- Successfully scraped 92 tables with 17,205 rate entries covering 25 international cities
- Added migration script to convert scraped data to Node.js database format
- Updated .gitignore for Python files (.venv/, __pycache__, *.pyc, *.sqlite3)
- Fixed city validation and currency conversion in main app
- Added comprehensive debug and verification scripts

This replaces manual JSON maintenance with automated data collection from official government source.
This commit is contained in:
2026-01-13 09:21:43 -05:00
commit 15094ac94b
84 changed files with 19859 additions and 0 deletions

View File

@@ -0,0 +1 @@
"""Gov Travel Scraper."""

224
src/gov_travel/db.py Normal file
View File

@@ -0,0 +1,224 @@
from __future__ import annotations
import json
import sqlite3
from pathlib import Path
from typing import Iterable
SCHEMA_STATEMENTS = [
"""
CREATE TABLE IF NOT EXISTS raw_tables (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source TEXT NOT NULL,
source_url TEXT NOT NULL,
table_index INTEGER NOT NULL,
title TEXT,
data_json TEXT NOT NULL
)
""",
"""
CREATE TABLE IF NOT EXISTS rate_entries (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source TEXT NOT NULL,
source_url TEXT NOT NULL,
country TEXT,
city TEXT,
province TEXT,
currency TEXT,
rate_type TEXT,
rate_amount REAL,
unit TEXT,
effective_date TEXT,
raw_json TEXT NOT NULL
)
""",
"""
CREATE TABLE IF NOT EXISTS exchange_rates (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source TEXT NOT NULL,
source_url TEXT NOT NULL,
currency TEXT,
rate_to_cad REAL,
effective_date TEXT,
raw_json TEXT NOT NULL
)
""",
"""
CREATE TABLE IF NOT EXISTS accommodations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source TEXT NOT NULL,
source_url TEXT NOT NULL,
property_name TEXT,
address TEXT,
city TEXT,
province TEXT,
phone TEXT,
rate_amount REAL,
currency TEXT,
effective_date TEXT,
raw_json TEXT NOT NULL
)
""",
]
def connect(db_path: Path) -> sqlite3.Connection:
    """Open a SQLite connection at *db_path*, creating parent dirs as needed.

    The connection's ``row_factory`` is set to :class:`sqlite3.Row` so
    callers can address result columns by name.
    """
    db_path.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
def init_db(connection: sqlite3.Connection) -> None:
    """Create every schema table that does not already exist, then commit."""
    for ddl in SCHEMA_STATEMENTS:
        connection.execute(ddl)
    connection.commit()
def insert_raw_tables(
    connection: sqlite3.Connection,
    source: str,
    source_url: str,
    tables: Iterable[dict],
) -> None:
    """Persist raw scraped tables as JSON blobs in ``raw_tables``.

    Each table dict must carry ``table_index`` and ``data`` and may carry a
    ``title``.  ``data`` is serialized with ``ensure_ascii=False`` so
    accented place names are stored as-is.
    """
    payload = [
        (
            source,
            source_url,
            table["table_index"],
            table.get("title"),
            json.dumps(table["data"], ensure_ascii=False),
        )
        for table in tables
    ]
    # Consistency with the other insert helpers: skip the no-op
    # executemany/commit round-trip when there is nothing to write.
    if not payload:
        return
    connection.executemany(
        """
        INSERT INTO raw_tables (source, source_url, table_index, title, data_json)
        VALUES (?, ?, ?, ?, ?)
        """,
        payload,
    )
    connection.commit()
def insert_rate_entries(
    connection: sqlite3.Connection,
    entries: Iterable[dict],
) -> None:
    """Bulk-insert extracted rate rows; a no-op when *entries* is empty.

    Required keys per entry: ``source``, ``source_url``, ``raw``; every
    other column is optional and stored as NULL when missing.
    """
    rows = []
    for entry in entries:
        rows.append(
            (
                entry["source"],
                entry["source_url"],
                entry.get("country"),
                entry.get("city"),
                entry.get("province"),
                entry.get("currency"),
                entry.get("rate_type"),
                entry.get("rate_amount"),
                entry.get("unit"),
                entry.get("effective_date"),
                json.dumps(entry["raw"], ensure_ascii=False),
            )
        )
    if not rows:
        return
    connection.executemany(
        "INSERT INTO rate_entries ("
        " source, source_url, country, city, province, currency,"
        " rate_type, rate_amount, unit, effective_date, raw_json"
        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
        rows,
    )
    connection.commit()
def insert_exchange_rates(
    connection: sqlite3.Connection,
    entries: Iterable[dict],
) -> None:
    """Bulk-insert currency-to-CAD rows; a no-op when *entries* is empty."""
    rows = []
    for entry in entries:
        rows.append(
            (
                entry["source"],
                entry["source_url"],
                entry.get("currency"),
                entry.get("rate_to_cad"),
                entry.get("effective_date"),
                json.dumps(entry["raw"], ensure_ascii=False),
            )
        )
    if not rows:
        return
    connection.executemany(
        "INSERT INTO exchange_rates"
        " (source, source_url, currency, rate_to_cad, effective_date, raw_json)"
        " VALUES (?, ?, ?, ?, ?, ?)",
        rows,
    )
    connection.commit()
def insert_accommodations(
    connection: sqlite3.Connection,
    entries: Iterable[dict],
) -> None:
    """Bulk-insert accommodation listings; a no-op when *entries* is empty."""
    rows = []
    for entry in entries:
        rows.append(
            (
                entry["source"],
                entry["source_url"],
                entry.get("property_name"),
                entry.get("address"),
                entry.get("city"),
                entry.get("province"),
                entry.get("phone"),
                entry.get("rate_amount"),
                entry.get("currency"),
                entry.get("effective_date"),
                json.dumps(entry["raw"], ensure_ascii=False),
            )
        )
    if not rows:
        return
    connection.executemany(
        "INSERT INTO accommodations ("
        " source, source_url, property_name, address, city, province,"
        " phone, rate_amount, currency, effective_date, raw_json"
        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
        rows,
    )
    connection.commit()

50
src/gov_travel/main.py Normal file
View File

@@ -0,0 +1,50 @@
from __future__ import annotations
import argparse
from pathlib import Path
from gov_travel import db
from gov_travel.scrapers import (
SOURCES,
extract_accommodations,
extract_exchange_rates,
extract_rate_entries,
scrape_tables_from_source,
)
def parse_args() -> argparse.Namespace:
    """Parse CLI options; currently only ``--db`` (SQLite output path)."""
    cli = argparse.ArgumentParser(description="Scrape travel rates into SQLite")
    cli.add_argument(
        "--db",
        type=Path,
        default=Path("data/travel_rates.sqlite3"),
        help="Path to the SQLite database",
    )
    return cli.parse_args()
def main() -> None:
    """Scrape every configured source and persist results into SQLite.

    For each source the raw tables are archived first, then rate entries
    and exchange rates are extracted; accommodation records are only
    extracted for the source named ``accommodations``.
    """
    args = parse_args()
    connection = db.connect(args.db)
    db.init_db(connection)
    for source in SOURCES:
        tables = scrape_tables_from_source(source)
        # Archive the raw tables before deriving anything from them.
        db.insert_raw_tables(connection, source.name, source.url, tables)
        rate_entries = extract_rate_entries(source, tables)
        db.insert_rate_entries(connection, rate_entries)
        exchange_rates = extract_exchange_rates(source, tables)
        db.insert_exchange_rates(connection, exchange_rates)
        if source.name == "accommodations":
            accommodations = extract_accommodations(source, tables)
            db.insert_accommodations(connection, accommodations)
    connection.close()


if __name__ == "__main__":
    main()

247
src/gov_travel/scrapers.py Normal file
View File

@@ -0,0 +1,247 @@
from __future__ import annotations
import json
import re
from dataclasses import dataclass
from typing import Any, Iterable
import pandas as pd
import requests
from bs4 import BeautifulSoup
# Sent on every request so site operators can identify this scraper.
# NOTE(review): the contact URL is a placeholder — point it at the real project.
USER_AGENT = "GovTravelScraper/1.0 (+https://example.com)"
@dataclass(frozen=True)
class SourceConfig:
    """Immutable descriptor of one scrape target: a short name plus its URL."""

    name: str
    url: str
# Pages to scrape.  The names are significant: the scraper entry point
# special-cases "accommodations", and rate extraction defaults to CAD for
# the "domestic" and "accommodations" sources.
SOURCES = [
    SourceConfig(name="international", url="https://www.njc-cnm.gc.ca/directive/app_d.php?lang=en"),
    SourceConfig(name="domestic", url="https://www.njc-cnm.gc.ca/directive/d10/v325/s978/en"),
    SourceConfig(name="accommodations", url="https://rehelv-acrd.tpsgc-pwgsc.gc.ca/lth-crl-eng.aspx"),
]
def fetch_html(url: str) -> str:
    """Download *url* and return its body as text.

    Raises ``requests.HTTPError`` for non-2xx responses.  The response
    encoding is overridden with the detected (apparent) encoding —
    presumably because the scraped pages mislabel their charset; confirm
    before removing.
    """
    resp = requests.get(url, headers={"User-Agent": USER_AGENT}, timeout=60)
    resp.raise_for_status()
    resp.encoding = resp.apparent_encoding
    return resp.text
def extract_tables(html: str) -> list[pd.DataFrame]:
    """Parse every HTML ``<table>`` in *html* into a pandas DataFrame."""
    from io import StringIO  # local import keeps this fix self-contained

    # Passing a literal HTML string to read_html is deprecated (pandas 2.1+);
    # wrap it in a file-like object instead.
    return pd.read_html(StringIO(html))
def _normalize_header(header: str) -> str:
return re.sub(r"\s+", " ", header.strip().lower())
def _parse_amount(value: Any) -> float | None:
if value is None:
return None
text = str(value)
match = re.search(r"-?\d+(?:[\.,]\d+)?", text)
if not match:
return None
amount_text = match.group(0).replace(",", "")
try:
return float(amount_text)
except ValueError:
return None
def _detect_currency(value: Any, fallback: str | None = None) -> str | None:
if value is None:
return fallback
text = str(value).upper()
if "CAD" in text:
return "CAD"
if "USD" in text:
return "USD"
match = re.search(r"\b[A-Z]{3}\b", text)
if match:
return match.group(0)
return fallback
def _extract_currency_from_title(title: str | None) -> str | None:
"""Extract currency code from table title like 'Albania - Currency: Euro (EUR)'"""
if not title:
return None
# Pattern: "Currency: [Name] ([CODE])"
match = re.search(r"Currency:\s*[^(]+\(([A-Z]{3})\)", title)
if match:
return match.group(1)
return None
def _extract_country_from_title(title: str | None) -> str | None:
"""Extract country name from table title like 'Albania - Currency: Euro (EUR)'"""
if not title:
return None
# Country is before the first " - "
match = re.match(r"^([^-]+)", title)
if match:
return match.group(1).strip()
return None
def _table_title_map(html: str) -> dict[int, str]:
    """Map each <table>'s document-order index to the text of the nearest
    preceding heading (h1-h4) or caption, when one exists."""
    soup = BeautifulSoup(html, "html.parser")
    mapping: dict[int, str] = {}
    for position, node in enumerate(soup.find_all("table")):
        label = node.find_previous(["h1", "h2", "h3", "h4", "caption"])
        if label:
            mapping[position] = label.get_text(strip=True)
    return mapping
def scrape_tables_from_source(source: SourceConfig) -> list[dict[str, Any]]:
    """Fetch *source* and return its tables as JSON-ready dicts.

    Each result dict carries ``table_index``, an optional ``title`` taken
    from the nearest preceding heading, and ``data`` as a list of row dicts.
    """
    page = fetch_html(source.url)
    frames = extract_tables(page)
    titles = _table_title_map(page)
    collected: list[dict[str, Any]] = []
    for position, frame in enumerate(frames):
        # read_html can yield two-level (MultiIndex) headers; keep the
        # second level so the JSON records end up with flat string keys.
        if isinstance(frame.columns, pd.MultiIndex):
            frame.columns = [col[1] if col[0] != col[1] else col[0] for col in frame.columns]
        records = json.loads(frame.to_json(orient="records"))
        collected.append(
            {
                "table_index": position,
                "title": titles.get(position),
                "data": records,
            }
        )
    return collected
def extract_rate_entries(
    source: SourceConfig,
    tables: Iterable[dict[str, Any]],
) -> list[dict[str, Any]]:
    """Flatten scraped tables into one record per (location, rate_type).

    Currency and country are taken from the table title when present;
    domestic Canadian sources default to CAD.  Every remaining numeric
    column in a row becomes its own rate entry.
    """
    # Columns that describe the row rather than carry a rate amount.
    descriptive = {
        "country", "country/territory", "city", "location", "province",
        "province/territory", "currency", "effective", "effective date",
        "type of accommodation", "accommodation type", "meal total",
        "grand total", "grand total (taxes included)",
    }
    results: list[dict[str, Any]] = []
    for table in tables:
        title = table.get("title")
        title_currency = _extract_currency_from_title(title)
        title_country = _extract_country_from_title(title)
        # Domestic Canadian pages don't state a currency in their titles.
        if title_currency is None and source.name in ("domestic", "accommodations"):
            title_currency = "CAD"
        for row in table["data"]:
            fields = {_normalize_header(str(key)): val for key, val in row.items()}
            shared = {
                "source": source.name,
                "source_url": source.url,
                "country": fields.get("country") or fields.get("country/territory") or title_country,
                "city": fields.get("city") or fields.get("location"),
                "province": fields.get("province") or fields.get("province/territory"),
                "currency": _detect_currency(fields.get("currency"), fallback=title_currency),
                "unit": None,
                "effective_date": fields.get("effective date") or fields.get("effective"),
                "raw": row,
            }
            for column, cell in fields.items():
                if column in descriptive:
                    continue
                amount = _parse_amount(cell)
                if amount is None:
                    continue
                results.append({**shared, "rate_type": column, "rate_amount": amount})
    return results
def extract_exchange_rates(
    source: SourceConfig,
    tables: Iterable[dict[str, Any]],
) -> list[dict[str, Any]]:
    """Collect currency-to-CAD conversion rows from any table exposing them.

    Rows missing either a currency column or a parseable rate are skipped.
    """
    collected: list[dict[str, Any]] = []
    for table in tables:
        for row in table["data"]:
            fields = {_normalize_header(key): val for key, val in row.items()}
            code = (
                fields.get("currency")
                or fields.get("currency code")
                or fields.get("code")
            )
            raw_rate = (
                fields.get("exchange rate")
                or fields.get("rate")
                or fields.get("cad rate")
                or fields.get("rate to cad")
            )
            amount = _parse_amount(raw_rate)
            if not code or amount is None:
                continue
            collected.append(
                {
                    "source": source.name,
                    "source_url": source.url,
                    "currency": _detect_currency(code),
                    "rate_to_cad": amount,
                    "effective_date": fields.get("effective date") or fields.get("date"),
                    "raw": row,
                }
            )
    return collected
def extract_accommodations(
    source: SourceConfig,
    tables: Iterable[dict[str, Any]],
) -> list[dict[str, Any]]:
    """Build hotel/property records from the accommodation directory tables.

    A row is kept only when it has a property name or at least a city;
    the currency is detected from the rate cell's own text.
    """
    found: list[dict[str, Any]] = []
    for table in tables:
        for row in table["data"]:
            fields = {_normalize_header(key): val for key, val in row.items()}
            name = (
                fields.get("property")
                or fields.get("hotel")
                or fields.get("accommodation")
                or fields.get("name")
            )
            # Rows lacking both a name and a city are layout/filler rows.
            if not name and not fields.get("city"):
                continue
            price = _parse_amount(
                fields.get("rate")
                or fields.get("room rate")
                or fields.get("daily rate")
            )
            found.append(
                {
                    "source": source.name,
                    "source_url": source.url,
                    "property_name": name,
                    "address": fields.get("address"),
                    "city": fields.get("city") or fields.get("location"),
                    "province": fields.get("province") or fields.get("province/territory"),
                    "phone": fields.get("phone") or fields.get("telephone"),
                    "rate_amount": price,
                    "currency": _detect_currency(fields.get("rate")),
                    "effective_date": fields.get("effective date") or fields.get("effective"),
                    "raw": row,
                }
            )
    return found