parent d67944a171
commit 5c3cb47ab5
@@ -24,6 +24,7 @@ from . import (
     economist,
     fx,
     gwr,
+    meetup,
     stock_market,
     subscription,
     sun,
@@ -259,6 +260,7 @@ async def get_data(now: datetime) -> typing.Mapping[str, str | object]:
     events += read_events_yaml(my_data, last_year, next_year)
     events += subscription.get_events(os.path.join(my_data, "subscriptions.yaml"))
     events += economist.publication_dates(last_week, next_year)
+    events += meetup.get_events(my_data)
     events += domains.renewal_dates(my_data)
29  agenda/meetup.py  Normal file

@@ -0,0 +1,29 @@
+"""Meetup.com events I'm attending."""
+
+import json
+import os.path
+from datetime import datetime
+
+from .types import Event
+
+
+def get_events(data_dir: str) -> list[Event]:
+    """Get events."""
+    data = json.load(open(os.path.join(data_dir, "meetup_upcoming.json")))["data"]
+
+    events: list[Event] = []
+    item_list = data["self"]["upcomingEvents"]["edges"]
+    for item in item_list:
+        item_event = item["node"]["event"]
+        start = datetime.fromisoformat(item["node"]["event"]["dateTime"])
+        end = datetime.fromisoformat(item["node"]["event"]["endTime"])
+        e = Event(
+            date=start,
+            end_date=end,
+            name="meetup",
+            title=item_event["title"],
+            url=item_event["eventUrl"],
+        )
+        events.append(e)
+
+    return events
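For reference, a minimal sketch of the JSON shape that get_events() expects to find in meetup_upcoming.json, inferred only from the keys the new module reads; the sample field values and the top-level agenda import path are illustrative assumptions.

# Hypothetical fixture exercising agenda.meetup.get_events() with the
# minimal structure it reads: data.self.upcomingEvents.edges[].node.event
import json
import os
import tempfile

from agenda import meetup  # assumes the package above is importable as "agenda"

sample = {
    "data": {
        "self": {
            "upcomingEvents": {
                "edges": [
                    {
                        "node": {
                            "event": {
                                "title": "Example meetup",  # assumed value
                                "eventUrl": "https://www.meetup.com/example/events/1/",
                                "dateTime": "2023-06-01T19:00:00+01:00",
                                "endTime": "2023-06-01T21:00:00+01:00",
                            }
                        }
                    }
                ]
            }
        }
    }
}

with tempfile.TemporaryDirectory() as data_dir:
    # Write the fixture where get_events() looks for it, then parse it back.
    with open(os.path.join(data_dir, "meetup_upcoming.json"), "w") as f:
        json.dump(sample, f)
    events = meetup.get_events(data_dir)
    print(len(events))  # -> 1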