2023-11-07 15:55:05 +00:00
|
|
|
"""Agenda data."""
|
|
|
|
|
|
|
|
import asyncio
|
2023-12-09 13:51:24 +00:00
|
|
|
import collections
|
2023-11-07 15:55:05 +00:00
|
|
|
import os
|
|
|
|
import typing
|
2023-11-09 16:51:01 +00:00
|
|
|
from datetime import date, datetime, timedelta
|
2023-11-07 15:55:05 +00:00
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
import dateutil.rrule
|
2023-11-07 15:55:05 +00:00
|
|
|
import dateutil.tz
|
2023-12-07 15:52:48 +00:00
|
|
|
import flask
|
2023-12-22 14:47:06 +00:00
|
|
|
import holidays
|
2023-12-05 00:04:56 +00:00
|
|
|
import isodate # type: ignore
|
2023-11-07 15:55:05 +00:00
|
|
|
import lxml
|
|
|
|
import pytz
|
2023-11-08 14:40:07 +00:00
|
|
|
import yaml
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
|
|
from . import (
|
|
|
|
accommodation,
|
|
|
|
birthday,
|
|
|
|
calendar,
|
|
|
|
conference,
|
2023-11-12 18:58:56 +00:00
|
|
|
domains,
|
2023-11-07 15:55:05 +00:00
|
|
|
economist,
|
|
|
|
fx,
|
|
|
|
gwr,
|
2023-12-01 10:35:58 +00:00
|
|
|
hn,
|
2023-11-21 08:15:16 +00:00
|
|
|
meetup,
|
2023-11-07 15:55:05 +00:00
|
|
|
stock_market,
|
|
|
|
subscription,
|
|
|
|
sun,
|
|
|
|
thespacedevs,
|
|
|
|
travel,
|
|
|
|
uk_holiday,
|
2023-12-26 19:29:07 +00:00
|
|
|
uk_tz,
|
2023-11-07 15:55:05 +00:00
|
|
|
waste_schedule,
|
|
|
|
)
|
2023-12-09 13:51:24 +00:00
|
|
|
from .types import Event, Holiday
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
|
|
# Local timezone of the host running the app.
# NOTE(review): appears unused in this file — possibly consumed elsewhere.
here = dateutil.tz.tzlocal()

# Ideas for future event sources:
# TODO: deadline to file tax return
# TODO: credit card expiry dates
# TODO: morzine ski lifts
# TODO: chalet availability calendar
# TODO: starlink visible
|
|
|
|
|
|
|
|
|
|
|
|
def timezone_transition(
    start: datetime, end: datetime, key: str, tz_name: str
) -> list[Event]:
    """Clocks changes.

    Returns one event (named *key*) for each timezone transition of
    *tz_name* whose UTC instant falls within [start, end], expressed as
    local wall-clock time.
    """
    zone = pytz.timezone(tz_name)
    changes: list[Event] = []
    # NOTE: _utc_transition_times is a private pytz attribute (naive UTC
    # datetimes), so start/end are expected to be naive too.
    for instant in zone._utc_transition_times:  # type: ignore
        if not (start <= instant <= end):
            continue
        local = pytz.utc.localize(instant).astimezone(zone)
        changes.append(Event(name=key, date=local))
    return changes
|
|
|
|
|
|
|
|
|
2023-12-09 13:51:24 +00:00
|
|
|
def us_holidays(start_date: date, end_date: date) -> list[Holiday]:
    """Get US holidays.

    Public holidays strictly between start_date and end_date, plus the
    Black Friday / Cyber Monday shopping days derived from each
    Thanksgiving found.
    """
    result: list[Holiday] = []
    for year in range(start_date.year, end_date.year + 1):
        year_hols = holidays.country_holidays("US", years=year, language="en")
        for hol_date, title in year_hols.items():
            if start_date < hol_date < end_date:
                result.append(Holiday(date=hol_date, name=title, country="us"))

    shopping: list[Holiday] = []
    for hol in result:
        if hol.name == "Thanksgiving":
            # Black Friday is the next day; Cyber Monday the following Monday.
            shopping.append(
                Holiday(
                    date=hol.date + timedelta(days=1),
                    name="Black Friday",
                    country="us",
                )
            )
            shopping.append(
                Holiday(
                    date=hol.date + timedelta(days=4),
                    name="Cyber Monday",
                    country="us",
                )
            )

    return result + shopping
|
|
|
|
|
|
|
|
|
2023-12-23 16:12:49 +00:00
|
|
|
def get_nyse_holidays(
    start_date: date, end_date: date, us_hols: list[Holiday]
) -> list[Event]:
    """NYSE holidays.

    Market closures strictly between start_date and end_date that are
    not already covered by the supplied US public holidays; each
    surviving event gets " (NYSE)" appended to its title.
    """
    already_known = {(h.date, h.name) for h in us_hols}
    # The NYSE calendar names Thanksgiving differently from the US one.
    rename = {"Thanksgiving Day": "Thanksgiving"}

    nyse_only: list[Event] = []
    for year in range(start_date.year, end_date.year + 1):
        closures = holidays.financial_holidays("NYSE", years=year)
        for closure_date, raw_title in closures.items():
            if not (start_date < closure_date < end_date):
                continue
            title = rename.get(raw_title, raw_title)
            if (closure_date, title) in already_known:
                continue
            nyse_only.append(Event(name="holiday", date=closure_date, title=title))

    for event in nyse_only:
        assert event.title
        event.title += " (NYSE)"
    return nyse_only
|
|
|
|
|
|
|
|
|
2023-12-09 13:51:24 +00:00
|
|
|
def get_holidays(country: str, start_date: date, end_date: date) -> list[Holiday]:
    """Get holidays.

    Public holidays for *country* (ISO code, any case) strictly between
    start_date and end_date, with names in US English.
    """
    result: list[Holiday] = []
    for year in range(start_date.year, end_date.year + 1):
        year_hols = holidays.country_holidays(
            country.upper(), years=year, language="en_US"
        )
        for hol_date, hol_name in year_hols.items():
            if not (start_date < hol_date < end_date):
                continue
            result.append(
                Holiday(date=hol_date, name=hol_name, country=country.lower())
            )
    return result
|
|
|
|
|
|
|
|
|
2023-12-26 19:29:07 +00:00
|
|
|
def midnight(d: date) -> datetime:
    """Convert from date to midnight on that day (naive datetime)."""
    return datetime(d.year, d.month, d.day)
|
|
|
|
|
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
def dates_from_rrule(
    rrule: str, start: date, end: date
) -> typing.Sequence[datetime | date]:
    """Generate events from an RRULE between start_date and end_date.

    Rules without any time-of-day component are treated as all-day and
    yield plain dates; timed rules yield UK-localised datetimes.
    """
    # A rule is all-day unless it pins an hour, minute or second.
    timed = any(part in rrule for part in ("BYHOUR", "BYMINUTE", "BYSECOND"))
    window_start = midnight(start)
    rule = dateutil.rrule.rrulestr(rrule, dtstart=window_start)
    occurrences = rule.between(window_start, midnight(end))
    if timed:
        return [uk_tz.localize(occurrence) for occurrence in occurrences]
    return [occurrence.date() for occurrence in occurrences]
|
2023-11-07 15:55:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def waste_collection_events(data_dir: str) -> list[Event]:
    """Waste collection events.

    Fetches the published collection schedule page for a fixed
    postcode/UPRN and parses the collection dates out of the HTML.
    """
    # The module-level ``import lxml`` does not guarantee the ``html``
    # submodule is loaded; import it explicitly before use.
    import lxml.html

    postcode = "BS48 3HG"
    uprn = "24071046"

    html = await waste_schedule.get_html(data_dir, postcode, uprn)
    root = lxml.html.fromstring(html)
    events = waste_schedule.parse(root)
    return events
|
|
|
|
|
|
|
|
|
|
|
|
async def bristol_waste_collection_events(
    data_dir: str, start_date: date
) -> list[Event]:
    """Waste collection events.

    Delegates to the bristol.gov.uk scraper for a fixed property (UPRN).
    """
    uprn = "358335"

    return await waste_schedule.get_bristol_gov_uk(start_date, data_dir, uprn)
|
|
|
|
|
|
|
|
|
2023-12-09 13:51:24 +00:00
|
|
|
def combine_holidays(holidays: list[Holiday]) -> list[Event]:
    """Combine UK and US holidays with the same date and title.

    Holidays observed on the same date are folded into one event; the
    event title is annotated with which countries observe it (or which
    do not), unless all countries share it.

    NOTE: the parameter name shadows the imported ``holidays`` module,
    which this function does not use.
    """
    all_countries = {h.country for h in holidays}
    total = len(all_countries)

    # Canonical titles for fixed-date holidays that different countries
    # name differently, so they group together.
    standard_name = {
        (1, 1): "New Year's Day",
        (1, 6): "Epiphany",
        (12, 8): "Immaculate conception",
        (12, 25): "Christmas Day",
        (12, 26): "Boxing Day",
    }

    grouped: collections.defaultdict[
        tuple[date, str], set[str]
    ] = collections.defaultdict(set)
    for h in holidays:
        assert isinstance(h.name, str) and isinstance(h.date, date)
        title = standard_name.get((h.date.month, h.date.day), h.name)
        grouped[(h.date, title)].add(h.country)

    events: list[Event] = []
    for (day, title), observing in grouped.items():
        if len(observing) == total:
            # Observed everywhere: no country annotation needed.
            suffix = ""
        elif len(observing) < total / 2:
            # Observed by a minority: list who does.
            suffix = ", ".join(sorted(c.upper() for c in observing))
        else:
            # Observed by a majority: shorter to list who doesn't.
            missing = all_countries - set(observing)
            suffix = "not " + ", ".join(sorted(c.upper() for c in missing))

        events.append(
            Event(
                name="holiday",
                date=day,
                title=f"{title} ({suffix})" if suffix else title,
            )
        )

    return events
|
2023-11-08 04:06:04 +00:00
|
|
|
|
|
|
|
|
2023-11-09 23:35:38 +00:00
|
|
|
def get_yaml_event_date_field(item: dict[str, str]) -> str:
    """Event date field name.

    Travel-insurance items are keyed on their end date; everything else
    uses "start_date" when present, falling back to "date".
    """
    if item["name"] == "travel_insurance":
        return "end_date"
    if "start_date" in item:
        return "start_date"
    return "date"
|
|
|
|
|
|
|
|
|
2023-11-10 00:02:22 +00:00
|
|
|
def get_yaml_event_end_date_field(item: dict[str, str]) -> str:
    """Event end date field name.

    NOTE(review): the body is identical to get_yaml_event_date_field
    despite the "end_date" in this function's name — confirm whether it
    was meant to prefer an "end_date" key instead.
    """
    return (
        "end_date"
        if item["name"] == "travel_insurance"
        else ("start_date" if "start_date" in item else "date")
    )
|
|
|
|
|
|
|
|
|
2023-11-09 16:51:01 +00:00
|
|
|
def read_events_yaml(data_dir: str, start: date, end: date) -> list[Event]:
    """Read events from YAML file.

    Each item supplies either an explicit date field or an RRULE that is
    expanded over [start, end]. An optional ISO-8601 "duration" derives
    an end date from each occurrence.
    """
    events: list[Event] = []
    # Use a context manager so the file handle is closed promptly.
    with open(os.path.join(data_dir, "events.yaml"), encoding="utf-8") as f:
        items = yaml.safe_load(f)
    for item in items:
        duration = (
            isodate.parse_duration(item["duration"]) if "duration" in item else None
        )
        dates = (
            dates_from_rrule(item["rrule"], start, end)
            if "rrule" in item
            else [item[get_yaml_event_date_field(item)]]
        )
        for dt in dates:
            if duration:
                end_date = dt + duration
            elif item["name"] != "travel_insurance":
                end_date = item.get("end_date")
            else:
                # travel_insurance is already keyed on its end date, so
                # no separate end_date is attached.
                end_date = None
            events.append(
                Event(
                    name=item["name"],
                    date=dt,
                    end_date=end_date,
                    title=item.get("title"),
                    url=item.get("url"),
                )
            )
    return events
|
2023-11-08 12:48:02 +00:00
|
|
|
|
|
|
|
|
2023-12-22 14:47:06 +00:00
|
|
|
def find_markets_during_stay(
    accommodation_events: list[Event], markets: list[Event]
) -> list[Event]:
    """Market events that happen during accommodation stays.

    A market overlaps a stay when its date falls between the stay's
    start date and end date (a stay without an end date counts as a
    single day).
    """
    return [
        market
        for market in markets
        if any(
            stay.date <= market.date <= (stay.end_date or stay.date)
            for stay in accommodation_events
        )
    ]
|
|
|
|
|
|
|
|
|
2023-12-07 15:52:48 +00:00
|
|
|
async def get_data(
    now: datetime, config: flask.config.Config
) -> typing.Mapping[str, str | object]:
    """Get data to display on agenda dashboard.

    Gathers the I/O-bound sources concurrently, merges every event
    source into one chronologically sorted list, and returns a mapping
    of values for the dashboard template.
    """
    data_dir = config["DATA_DIR"]

    rocket_dir = os.path.join(data_dir, "thespacedevs")

    # Reference dates: a window of roughly one year either side of today.
    today = now.date()
    two_weeks_ago = today - timedelta(weeks=2)
    last_week = today - timedelta(weeks=1)
    last_year = today - timedelta(days=365)
    next_year = today + timedelta(days=365)

    # The same one-year window as datetimes, for the clock-change lookups.
    minus_365 = now - timedelta(days=365)
    plus_365 = now + timedelta(days=365)

    # Fetch the slow/remote sources concurrently.
    (
        gbpusd,
        gwr_advance_tickets,
        bank_holiday,
        rockets,
        backwell_bins,
        bristol_bins,
    ) = await asyncio.gather(
        fx.get_gbpusd(config),
        gwr.advance_ticket_date(data_dir),
        uk_holiday.bank_holiday_list(last_year, next_year, data_dir),
        thespacedevs.get_launches(rocket_dir, limit=40),
        waste_collection_events(data_dir),
        bristol_waste_collection_events(data_dir, today),
    )

    reply = {
        "now": now,
        "gbpusd": gbpusd,
        "stock_markets": stock_market.open_and_close(),
        "rockets": rockets,
        "gwr_advance_tickets": gwr_advance_tickets,
    }

    my_data = config["PERSONAL_DATA"]
    # Seed the event list with Mother's Day and the UK/US clock changes.
    events = (
        [
            Event(name="mothers_day", date=uk_holiday.get_mothers_day(today)),
        ]
        + timezone_transition(minus_365, plus_365, "uk_clock_change", "Europe/London")
        + timezone_transition(
            minus_365, plus_365, "us_clock_change", "America/New_York"
        )
    )

    if gwr_advance_tickets:
        events.append(Event(name="gwr_advance_tickets", date=gwr_advance_tickets))

    us_hols = us_holidays(last_year, next_year)

    # NOTE: this local name shadows the imported ``holidays`` module,
    # which is not used directly in this function.
    holidays: list[Holiday] = bank_holiday + us_hols
    for country in (
        "at",
        "be",
        "br",
        "ch",
        "cz",
        "de",
        "dk",
        "ee",
        "es",
        "fi",
        "fr",
        "gr",
        "it",
        "ke",
        "nl",
        "pl",
    ):
        holidays += get_holidays(country, last_year, next_year)

    events += get_nyse_holidays(last_year, next_year, us_hols)

    accommodation_events = accommodation.get_events(
        os.path.join(my_data, "accommodation.yaml")
    )

    # Merge the remaining event sources.
    events += combine_holidays(holidays)
    events += birthday.get_birthdays(last_year, os.path.join(my_data, "entities.yaml"))
    events += accommodation_events
    events += travel.all_events(my_data)
    events += conference.get_list(os.path.join(my_data, "conferences.yaml"))
    events += backwell_bins + bristol_bins
    events += read_events_yaml(my_data, last_year, next_year)
    events += subscription.get_events(os.path.join(my_data, "subscriptions.yaml"))
    events += economist.publication_dates(last_week, next_year)
    events += meetup.get_events(my_data)
    events += hn.whoishiring(last_year, next_year)

    events += domains.renewal_dates(my_data)

    # hide markets that happen while away
    markets = [e for e in events if e.name == "market"]

    overlapping_markets = find_markets_during_stay(accommodation_events, markets)
    for market in overlapping_markets:
        events.remove(market)

    # Turn upcoming launches into events: day-precision launches become
    # all-day events, otherwise the exact UTC launch time is used.
    for launch in rockets:
        dt: datetime | date | None = None

        if launch["net_precision"] == "Day":
            dt = datetime.strptime(launch["net"], "%Y-%m-%dT00:00:00Z").date()
        elif launch["t0_time"]:
            dt = pytz.utc.localize(
                datetime.strptime(launch["net"], "%Y-%m-%dT%H:%M:%SZ")
            )

        if not dt:
            # No usable launch time published yet: skip this launch.
            continue

        rocket_name = f'🚀{launch["rocket"]}: {launch["mission_name"] or "[no mission]"}'
        e = Event(name="rocket", date=dt, title=rocket_name)
        events.append(e)

    # Sort chronologically; the "today" marker sorts ahead of anything
    # sharing its timestamp (False < True in the secondary key).
    events += [Event(name="today", date=today)]
    events.sort(key=lambda e: (e.as_datetime, e.name != "today"))

    observer = sun.bristol()
    reply["sunrise"] = sun.sunrise(observer)
    reply["sunset"] = sun.sunset(observer)
    reply["events"] = events
    reply["last_week"] = last_week
    reply["two_weeks_ago"] = two_weeks_ago

    reply["fullcalendar_events"] = calendar.build_events(events)

    return reply
|