2023-11-07 15:55:05 +00:00
|
|
|
|
"""Agenda data."""
|
|
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
|
import configparser
|
|
|
|
|
import operator
|
|
|
|
|
import os
|
|
|
|
|
import typing
|
2023-11-09 16:51:01 +00:00
|
|
|
|
from datetime import date, datetime, timedelta
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
|
import dateutil.rrule
|
2023-11-07 15:55:05 +00:00
|
|
|
|
import dateutil.tz
|
2023-11-09 16:51:01 +00:00
|
|
|
|
import holidays # type: ignore
|
2023-11-10 00:02:22 +00:00
|
|
|
|
import isodate
|
2023-11-07 15:55:05 +00:00
|
|
|
|
import lxml
|
|
|
|
|
import pytz
|
2023-11-08 14:40:07 +00:00
|
|
|
|
import yaml
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
|
|
|
|
from . import (
|
|
|
|
|
accommodation,
|
|
|
|
|
birthday,
|
|
|
|
|
calendar,
|
|
|
|
|
conference,
|
2023-11-12 18:58:56 +00:00
|
|
|
|
domains,
|
2023-11-07 15:55:05 +00:00
|
|
|
|
economist,
|
|
|
|
|
fx,
|
|
|
|
|
gwr,
|
2023-11-21 08:15:16 +00:00
|
|
|
|
meetup,
|
2023-11-07 15:55:05 +00:00
|
|
|
|
stock_market,
|
|
|
|
|
subscription,
|
|
|
|
|
sun,
|
|
|
|
|
thespacedevs,
|
|
|
|
|
travel,
|
|
|
|
|
uk_holiday,
|
2023-11-09 16:51:01 +00:00
|
|
|
|
uk_midnight,
|
2023-11-07 15:55:05 +00:00
|
|
|
|
waste_schedule,
|
|
|
|
|
)
|
|
|
|
|
from .types import Event
|
|
|
|
|
|
|
|
|
|
# Local timezone of the host machine (not referenced in this chunk —
# presumably used elsewhere in the package; TODO confirm before removing).
here = dateutil.tz.tzlocal()
|
|
|
|
|
|
|
|
|
|
# deadline to file tax return
|
|
|
|
|
# credit card expiry dates
|
|
|
|
|
# morzine ski lifts
|
|
|
|
|
# chalet availability calendar
|
|
|
|
|
|
|
|
|
|
# starlink visible
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def timezone_transition(
    start: datetime, end: datetime, key: str, tz_name: str
) -> list[Event]:
    """Return clock-change events for *tz_name* falling between *start* and *end*.

    Each event is named *key* and dated with the transition instant
    converted to the zone's local time.
    """
    zone = pytz.timezone(tz_name)
    # NOTE: _utc_transition_times is a pytz-internal list of naive UTC
    # datetimes, one per DST transition.
    changes: list[Event] = []
    for utc_naive in zone._utc_transition_times:  # type: ignore
        if not (start <= utc_naive <= end):
            continue
        local_time = pytz.utc.localize(utc_naive).astimezone(zone)
        changes.append(Event(name=key, date=local_time))
    return changes
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_us_holidays(start_date: date, end_date: date) -> list[Event]:
    """US holidays strictly between *start_date* and *end_date*.

    Also adds Black Friday and Cyber Monday, derived from each
    Thanksgiving found in the window.
    """
    found: list[Event] = []
    for year in range(start_date.year, end_date.year + 1):
        year_holidays = holidays.UnitedStates(years=year)
        for hol_date, title in year_holidays.items():
            if not (start_date < hol_date < end_date):
                continue
            # Use a curly apostrophe in displayed titles.
            found.append(
                Event(
                    name="us_holiday",
                    date=hol_date,
                    title=title.replace("'", "’"),
                )
            )

    extra = []
    for event in found:
        if event.title != "Thanksgiving":
            continue
        # Shopping days are fixed offsets from Thanksgiving (a Thursday):
        # Black Friday the next day, Cyber Monday four days later.
        extra.append(
            Event(
                name="us_holiday",
                date=event.date + timedelta(days=1),
                title="Black Friday",
            )
        )
        extra.append(
            Event(
                name="us_holiday",
                date=event.date + timedelta(days=4),
                title="Cyber Monday",
            )
        )

    return found + extra
|
|
|
|
|
|
|
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
|
def dates_from_rrule(
    rrule: str, start: date, end: date
) -> typing.Sequence[datetime | date]:
    """Expand an RRULE string into its occurrences between *start* and *end*."""
    # An occurrence is all-day unless the rule pins a time of day.
    has_time_of_day = any(
        part in rrule for part in ("BYHOUR", "BYMINUTE", "BYSECOND")
    )

    window_start = uk_midnight(start)
    window_end = uk_midnight(end)
    rule = dateutil.rrule.rrulestr(rrule, dtstart=window_start)
    occurrences = rule.between(window_start, window_end)

    if has_time_of_day:
        return list(occurrences)
    # All-day rules yield plain dates rather than datetimes.
    return [occurrence.date() for occurrence in occurrences]
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def waste_collection_events(data_dir: str) -> list[Event]:
    """Waste collection events (North Somerset / Backwell address).

    Fetches the council schedule page (cached under *data_dir*) and
    parses it into events.
    """
    # Hard-coded address details for the council lookup.
    postcode = "BS48 3HG"
    uprn = "24071046"

    html = await waste_schedule.get_html(data_dir, postcode, uprn)
    root = lxml.html.fromstring(html)
    events = waste_schedule.parse(root)
    return events
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def bristol_waste_collection_events(
    data_dir: str, start_date: date
) -> list[Event]:
    """Waste collection events from bristol.gov.uk for a fixed property."""
    # Hard-coded UPRN identifying the Bristol property.
    uprn = "358335"

    return await waste_schedule.get_bristol_gov_uk(start_date, data_dir, uprn)
|
|
|
|
|
|
|
|
|
|
|
2023-11-08 04:06:04 +00:00
|
|
|
|
def combine_holidays(events: list[Event]) -> list[Event]:
    """Combine UK and US holidays with the same date and title.

    When the same (date, title) pair appears more than once, the
    duplicates collapse into a single "bank_holiday" event whose title
    is suffixed with " (UK & US)".
    """
    merged: dict[tuple[date, str], Event] = {}

    for event in events:
        assert isinstance(event.title, str) and isinstance(event.date, date)
        key = (event.date, event.title)
        if key in merged:
            # Second sighting: the holiday is shared by both countries.
            merged[key] = Event(
                name="bank_holiday",
                date=event.date,
                title=event.title + " (UK & US)",
            )
        else:
            merged[key] = event

    return list(merged.values())
|
|
|
|
|
|
|
|
|
|
|
2023-11-09 23:35:38 +00:00
|
|
|
|
def get_yaml_event_date_field(item: dict[str, str]) -> str:
    """Name of the YAML field holding this event's display date."""
    if item["name"] == "travel_insurance":
        # Travel insurance is shown against its expiry date.
        return "end_date"
    if "start_date" in item:
        return "start_date"
    return "date"
|
|
|
|
|
|
|
|
|
|
|
2023-11-10 00:02:22 +00:00
|
|
|
|
def get_yaml_event_end_date_field(item: dict[str, str]) -> str:
    """Event end date field name.

    NOTE(review): the body is currently byte-identical to
    get_yaml_event_date_field, so for anything other than travel
    insurance it returns the *start* field, never "end_date". That looks
    like a copy-paste; behavior is kept unchanged here for callers, but
    confirm whether it should prefer "end_date" whenever present.
    """
    return (
        "end_date"
        if item["name"] == "travel_insurance"
        else ("start_date" if "start_date" in item else "date")
    )
|
|
|
|
|
|
|
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
|
def read_events_yaml(data_dir: str, start: date, end: date) -> list[Event]:
    """Read events from the events.yaml file in *data_dir*.

    Items with an "rrule" are expanded into every occurrence between
    *start* and *end*; other items contribute a single event. An
    ISO-8601 "duration" field, when present, sets each event's end date.
    """
    events: list[Event] = []
    # Context manager so the file handle is closed promptly (the original
    # open() call leaked it until garbage collection).
    with open(os.path.join(data_dir, "events.yaml")) as f:
        items = yaml.safe_load(f)
    for item in items:
        duration = (
            isodate.parse_duration(item["duration"]) if "duration" in item else None
        )
        # Recurring items expand via their RRULE; otherwise use the
        # item's single date field.
        dates = (
            dates_from_rrule(item["rrule"], start, end)
            if "rrule" in item
            else [item[get_yaml_event_date_field(item)]]
        )
        for dt in dates:
            e = Event(
                name=item["name"],
                date=dt,
                # With a duration the end date is derived; travel
                # insurance already uses end_date as its display date, so
                # it gets no separate end.
                end_date=(
                    dt + duration
                    if duration
                    else (
                        item.get("end_date")
                        if item["name"] != "travel_insurance"
                        else None
                    )
                ),
                title=item.get("title"),
                url=item.get("url"),
            )
            events.append(e)
    return events
|
2023-11-08 12:48:02 +00:00
|
|
|
|
|
|
|
|
|
|
2023-11-19 11:42:39 +00:00
|
|
|
|
def get_config() -> configparser.ConfigParser:
    """Load the config file from the package's parent directory.

    Raises FileNotFoundError if the file is missing.
    """
    config_filename = os.path.join(os.path.dirname(__file__), "..", "config")

    # Raise explicitly rather than assert: asserts are stripped under
    # `python -O`, which would let ConfigParser silently read nothing.
    if not os.path.exists(config_filename):
        raise FileNotFoundError(config_filename)

    config = configparser.ConfigParser()
    config.read(config_filename)

    return config
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def get_data(now: datetime) -> typing.Mapping[str, str | object]:
    """Get data to display on agenda dashboard."""
    config = get_config()

    data_dir = config.get("data", "dir")

    # Cache directory for The Space Devs launch data.
    rocket_dir = os.path.join(data_dir, "thespacedevs")

    # Date windows used to bound the various event feeds.
    today = now.date()
    two_weeks_ago = today - timedelta(weeks=2)
    last_week = today - timedelta(weeks=1)
    last_year = today - timedelta(days=365)
    next_year = today + timedelta(days=365)

    # Datetime equivalents of the one-year window, for feeds that need a
    # time component (clock changes).
    minus_365 = now - timedelta(days=365)
    plus_365 = now + timedelta(days=365)

    # Fetch the slow, network/disk-backed sources concurrently.
    (
        gbpusd,
        gwr_advance_tickets,
        bank_holiday,
        rockets,
        backwell_bins,
        bristol_bins,
    ) = await asyncio.gather(
        fx.get_gbpusd(config),
        gwr.advance_ticket_date(data_dir),
        uk_holiday.bank_holiday_list(last_year, next_year, data_dir),
        thespacedevs.get_launches(rocket_dir, limit=40),
        waste_collection_events(data_dir),
        bristol_waste_collection_events(data_dir, today),
    )

    reply = {
        "now": now,
        "gbpusd": gbpusd,
        "stock_markets": stock_market.open_and_close(),
        "rockets": rockets,
        "gwr_advance_tickets": gwr_advance_tickets,
    }

    # Directory holding personal YAML data files.
    my_data = config["data"]["personal-data"]
    events = (
        [
            Event(name="mothers_day", date=uk_holiday.get_mothers_day(today)),
        ]
        + timezone_transition(minus_365, plus_365, "uk_clock_change", "Europe/London")
        + timezone_transition(
            minus_365, plus_365, "us_clock_change", "America/New_York"
        )
    )

    # advance_ticket_date may return nothing when the date is unknown.
    if gwr_advance_tickets:
        events.append(Event(name="gwr_advance_tickets", date=gwr_advance_tickets))

    # Merge shared UK/US holidays into single "(UK & US)" events.
    events += combine_holidays(bank_holiday + get_us_holidays(last_year, next_year))
    events += birthday.get_birthdays(last_year, os.path.join(my_data, "entities.yaml"))
    events += accommodation.get_events(os.path.join(my_data, "accommodation.yaml"))
    events += travel.all_events(config["data"]["personal-data"])
    events += conference.get_list(os.path.join(my_data, "conferences.yaml"))
    events += backwell_bins + bristol_bins
    events += read_events_yaml(my_data, last_year, next_year)
    events += subscription.get_events(os.path.join(my_data, "subscriptions.yaml"))
    events += economist.publication_dates(last_week, next_year)
    events += meetup.get_events(my_data)

    events += domains.renewal_dates(my_data)

    # Turn each launch into an event: a plain date when only the day is
    # known, an aware UTC datetime when a launch time exists, otherwise skip.
    for launch in rockets:
        dt = None

        if launch["net_precision"] == "Day":
            dt = datetime.strptime(launch["net"], "%Y-%m-%dT00:00:00Z").date()
        elif launch["t0_time"]:
            dt = pytz.utc.localize(
                datetime.strptime(launch["net"], "%Y-%m-%dT%H:%M:%SZ")
            )

        if not dt:
            continue

        rocket_name = f'🚀{launch["rocket"]}: {launch["mission"]["name"]}'
        e = Event(name="rocket", date=dt, title=rocket_name)
        events.append(e)

    # Chronological order; as_datetime normalises date vs datetime events.
    events.sort(key=operator.attrgetter("as_datetime"))

    observer = sun.bristol()
    reply["sunrise"] = sun.sunrise(observer)
    reply["sunset"] = sun.sunset(observer)
    reply["events"] = events
    reply["last_week"] = last_week
    reply["two_weeks_ago"] = two_weeks_ago

    reply["fullcalendar_events"] = calendar.build_events(events)

    return reply
|