#!/usr/bin/python3
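"""Reverse geocode: find the Wikidata item and Commons category for a lat/lon."""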
import random
import typing

import sqlalchemy
from flask import Flask, jsonify, redirect, render_template, request, url_for
from werkzeug.wrappers import Response

import geocode
from geocode import database, model, scotland, wikidata
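# Q23311 is the City of London, which gets special handling below.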
city_of_london_qid = "Q23311"
app = Flask(__name__)
app.config.from_object("config.default")
database.init_app(app)


def get_random_lat_lon() -> tuple[float, float]:
    """Select random lat/lon within the UK."""
    # Rough bounding box: (south, east) and (north, west) corners.
    south, east = 50.8520, 0.3536
    north, west = 53.7984, -2.7296

    mul = 10000
    lat = random.randrange(int(south * mul), int(north * mul)) / mul
    lon = random.randrange(int(west * mul), int(east * mul)) / mul

    return lat, lon


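# SQLAlchemy query type for the polygons returned by model.Polygon.coords_within().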
Elements = sqlalchemy.orm.query.Query


def do_lookup(
    elements: Elements, lat: str | float, lon: str | float
) -> wikidata.WikidataDict:
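    """Build the Wikidata result for these OSM elements; return an error dict on query failure."""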
    try:
        hit = osm_lookup(elements, lat, lon)
    except wikidata.QueryError as e:
        return {
            "query": e.query,
            "error": e.r.text,
            "query_url": "https://query.wikidata.org/#" + e.query,
        }

    return wikidata.build_dict(hit, lat, lon)


def lat_lon_to_wikidata(lat: str | float, lon: str | float) -> dict[str, typing.Any]:
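    """Look up the Wikidata item for lat/lon, returning the matching elements and result."""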
    scotland_code = scotland.get_scotland_code(lat, lon)

    elements: typing.Any
    if scotland_code:
        # Inside Scotland, look the parish up in Wikidata by its code instead of using OSM polygons.
        rows = wikidata.lookup_scottish_parish_in_wikidata(scotland_code)
        hit = wikidata.commons_from_rows(rows)
        elements = []
        result = wikidata.build_dict(hit, lat, lon)

        return {"elements": elements, "result": result}

    elements = model.Polygon.coords_within(lat, lon)
    result = do_lookup(elements, lat, lon)

    # special case because the City of London is admin_level=6 in OSM
    if result.get("wikidata") == city_of_london_qid:
        return {"elements": elements, "result": result}

    admin_level = result.get("admin_level")

    if not admin_level or admin_level >= 7:
        return {"elements": elements, "result": result}

    # The match is a large admin area (admin_level below 7); try a Wikidata
    # geosearch for something more specific.
    row = wikidata.geosearch(lat, lon)
    if row:
        hit = wikidata.commons_from_rows([row])
        elements = []
        result = wikidata.build_dict(hit, lat, lon)

    return {"elements": elements, "result": result}


def osm_lookup(
    elements: Elements, lat: str | float, lon: str | float  # type: ignore
) -> wikidata.Hit | None:
    """Search the OSM elements for a matching Wikidata item."""
    ret: wikidata.Hit | None
    for e in elements:
        assert isinstance(e, model.Polygon)
        assert e.tags
        tags: typing.Mapping[str, typing.Any] = e.tags
        admin_level_tag = tags.get("admin_level")
        admin_level: int | None = (
            int(admin_level_tag)
            if admin_level_tag and admin_level_tag.isdigit()
            else None
        )
        if not admin_level and tags.get("boundary") != "political":
            continue
        if "wikidata" in tags:
            qid = tags["wikidata"]
            commons = wikidata.qid_to_commons_category(qid)
            if commons:
                return {
                    "wikidata": qid,
                    "commons_cat": commons,
                    "admin_level": admin_level,
                    "element": e.osm_id,
                }
        gss = tags.get("ref:gss")
        if gss:
            ret = wikidata.get_commons_cat_from_gss(gss)
            if ret:
                ret["admin_level"] = admin_level
                ret["element"] = e.osm_id
                return ret

        name = tags.get("name")
        if not name:
            continue
        if name.endswith(" CP"):
            name = name[:-3]
        rows = wikidata.lookup_wikidata_by_name(name, lat, lon)

        if len(rows) == 1:
            ret = wikidata.commons_from_rows(rows)
            if ret:
                ret["admin_level"] = admin_level
                ret["element"] = e.osm_id
                return ret

    # Fall back to the single element carrying a wikidata tag, if there is exactly one.
    has_wikidata_tag = [e.tags for e in elements if e.tags.get("wikidata")]
    if len(has_wikidata_tag) != 1:
        return None

    assert has_wikidata_tag[0]
    qid = has_wikidata_tag[0]["wikidata"]
    return typing.cast(
        wikidata.Hit,
        {
            "wikidata": qid,
            "commons_cat": wikidata.qid_to_commons_category(qid),
            "admin_level": admin_level,
        },
    )


def redirect_to_detail(q: str) -> Response:
    """Redirect to detail page."""
    lat, lon = [v.strip() for v in q.split(",", 1)]
    return redirect(url_for("detail_page", lat=lat, lon=lon))


@app.route("/")
def index() -> str | Response:
    """Index page."""
    q = request.args.get("q")
    if q and q.strip():
        return redirect_to_detail(q)

    lat, lon = request.args.get("lat"), request.args.get("lon")

    if lat is not None and lon is not None:
        # Bare lat/lon parameters get a JSON reply instead of a page.
        result = lat_lon_to_wikidata(lat, lon)["result"]
        result.pop("element", None)
        return jsonify(result)

    samples = sorted(geocode.samples, key=lambda row: row[2])
    return render_template("index.html", samples=samples)


@app.route("/random")
def random_location() -> str:
    """Return detail page for random lat/lon."""
    lat, lon = get_random_lat_lon()

    elements = model.Polygon.coords_within(lat, lon)
    result = do_lookup(elements, lat, lon)

    return render_template(
        "detail.html", lat=lat, lon=lon, result=result, elements=elements
    )


@app.route("/wikidata_tag")
def wikidata_tag() -> str:
    """Look up the Wikidata tag for lat/lon."""
    lat_str, lon_str = request.args["lat"], request.args["lon"]
    lat, lon = float(lat_str), float(lon_str)

    scotland_code = scotland.get_scotland_code(lat, lon)

    elements: typing.Any
    if scotland_code:
        rows = wikidata.lookup_scottish_parish_in_wikidata(scotland_code)
        hit = wikidata.commons_from_rows(rows)
        elements = []
        result = wikidata.build_dict(hit, lat, lon)
    else:
        elements = model.Polygon.coords_within(lat, lon)
        result = do_lookup(elements, lat, lon)

    return render_template(
        "wikidata_tag.html", lat=lat, lon=lon, result=result, elements=elements
    )


@app.route("/detail")
def detail_page() -> Response | str:
    """Detail page."""
    try:
        lat_str, lon_str = request.args["lat"], request.args["lon"]
        lat, lon = float(lat_str), float(lon_str)
    except (KeyError, ValueError):
        # Missing or malformed lat/lon: fall back to the index page.
        return redirect(url_for("index"))

    try:
        reply = lat_lon_to_wikidata(lat, lon)
    except wikidata.QueryError as e:
        query, r = e.args
        return render_template("query_error.html", lat=lat, lon=lon, query=query, r=r)

    element = reply["result"].pop("element", None)

    return render_template(
        "detail.html", lat=lat, lon=lon, str=str, element_id=element, **reply
    )


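# Routes defined above:
#   /                          index page; lat/lon params return JSON, q=lat,lon redirects to /detail
#   /random                    detail page for a random UK location
#   /wikidata_tag?lat=&lon=    Wikidata tag lookup page
#   /detail?lat=&lon=          detail page for the given coordinates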
if __name__ == "__main__":
    app.run(host="0.0.0.0")