--- /dev/null
+#! /usr/bin/python3
+
+from argparse import ArgumentParser
+from email.message import EmailMessage
+import io
+import json
+from smtplib import SMTP
+import statistics
+
+import requests
+import toml
+
+
def fetch_data():
    """Fetch the last reliable week of new-case counts for Cambridgeshire.

    Queries the UK government's coronavirus dashboard API for daily new
    cases (by specimen date) in the Cambridgeshire upper-tier local
    authority area.

    Returns:
        A tuple ``(last_week, mean)`` where ``last_week`` is a list of
        seven ``[date, new_cases]`` pairs (the API appears to return
        most-recent-first — TODO confirm) and ``mean`` is the arithmetic
        mean of those seven counts.

    Raises:
        requests.HTTPError: if the API responds with an error status.
        requests.Timeout: if the API does not respond within 30 seconds.
    """
    response = requests.get(
        "https://api.coronavirus.data.gov.uk/v1/data",
        params={
            "filters": "areaType=utla;areaName=Cambridgeshire",
            # A JSON-array structure makes each item of the response's
            # "data" list an array of values in the same order.
            "structure": json.dumps(
                ["date", "newCasesBySpecimenDate"], separators=(",", ":")
            ),
        },
        # requests has no default timeout; without one a stalled
        # connection would hang this script forever.
        timeout=30,
    )
    # Fail loudly on an HTTP error instead of letting the missing "data"
    # key below surface as a confusing KeyError.
    response.raise_for_status()
    data = response.json()["data"]

    # Skip the first three days of the response. This seems to be what the
    # website does, presumably because recent dates don't have reliable
    # enough numbers yet.
    data = data[3:]

    last_week = data[:7]
    mean = statistics.mean([new_cases for _, new_cases in last_week])
    return last_week, mean
+
+
def main():
    """Fetch the latest Cambridgeshire case numbers and report them.

    The report is emailed via a local SMTP server when --email is given
    and the configuration has an [email] section; otherwise it is
    written to stdout.
    """
    parser = ArgumentParser()
    parser.add_argument(
        "--config", type=toml.load, default={},
        help="Configuration file (TOML format)")
    parser.add_argument("--email", action="store_true")
    args = parser.parse_args()

    last_week, mean = fetch_data()

    # Build the report once; it is the same whether emailed or printed.
    report = io.StringIO()
    report.write(
        "Most recent week's worth of new cases in Cambridgeshire, "
        "by specimen date:\n")
    report.write("\n")
    report.write(f"Mean: {mean:.1f}\n")
    for date, new_cases in last_week:
        report.write(f"{date}: {new_cases}\n")
    body = report.getvalue()

    # Without --email (or without email configuration) just print the
    # report and stop.
    if not (args.email and "email" in args.config):
        print(body, end="")
        return

    email_config = args.config["email"]
    message = EmailMessage()
    message["From"] = email_config["from"]
    message["To"] = ", ".join(email_config["to_always"])
    message["Subject"] = f"Cambridgeshire case count: {mean:.1f}"
    message.set_content(body)
    with SMTP("localhost") as smtp:
        smtp.send_message(message)
+
+
+if __name__ == "__main__":
+ main()