Add needs_commons=false option and redesign detail and index pages
Add a needs_commons parameter (default true) to both the API endpoint and the detail page. When needs_commons=false, look up Wikidata items by OSM relation ID (P402) via WDQS to return the most specific matching item even if it has no Wikimedia Commons category. Only activate this path when the matched item has no Commons category, so that locations with a Commons cat always get the same result regardless of the parameter. Remove the nearest-polygon fallback that was returning incorrect results for inland points in broad admin areas (e.g. returning Falmer for a point in Brighton). That fallback found the nearest polygon by boundary distance without requiring containment, so the pin would appear outside the polygon. The geosearch handles these cases correctly. Redesign the detail page: place name as heading, result card, collapsible API response and SPARQL query, improved OSM element cards with left-border highlight on the matched element, and a toggle button between modes. Redesign the index page: two-column layout with numbered steps and API documentation including the needs_commons parameter, Bootstrap form, and examples as a table. Closes #28 (Add support for returning Wikidata item instead of commons category) Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
90b009fc90
commit
cd9d8779d3
6 changed files with 385 additions and 138 deletions
77
lookup.py
77
lookup.py
|
|
@@ -106,7 +106,9 @@ def add_missing_commons_cat(rows: list[StrDict]) -> None:
|
|||
row["commonsCat"] = {"type": "literal", "value": commons_cat}
|
||||
|
||||
|
||||
def lat_lon_to_wikidata(lat: float, lon: float) -> dict[str, typing.Any]:
|
||||
def lat_lon_to_wikidata(
|
||||
lat: float, lon: float, needs_commons: bool = True
|
||||
) -> dict[str, typing.Any]:
|
||||
"""Lookup lat/lon and find most appropriate Wikidata item."""
|
||||
scotland_code = scotland.get_scotland_code(lat, lon)
|
||||
|
||||
|
|
@@ -148,32 +150,54 @@ def lat_lon_to_wikidata(lat: float, lon: float) -> dict[str, typing.Any]:
|
|||
if not nearby_result.get("missing"):
|
||||
return {"elements": elements, "result": nearby_result}
|
||||
|
||||
# Point is in a broad area (e.g. country) — try nearest specific polygon
|
||||
nearby = model.Polygon.nearest(lat, lon)
|
||||
if nearby and nearby.tags:
|
||||
tags: typing.Mapping[str, str] = nearby.tags
|
||||
al = get_admin_level(tags)
|
||||
hit = (
|
||||
hit_from_wikidata_tag(tags)
|
||||
or hit_from_ref_gss_tag(tags)
|
||||
or hit_from_name(tags, lat, lon)
|
||||
)
|
||||
if hit:
|
||||
hit["admin_level"] = al
|
||||
hit["element"] = nearby.osm_id
|
||||
hit["geojson"] = typing.cast(str, nearby.geojson_str)
|
||||
nearby_result = wikidata.build_dict(hit, lat, lon)
|
||||
if not nearby_result.get("missing"):
|
||||
return {"elements": elements, "result": nearby_result}
|
||||
if not needs_commons:
|
||||
# Direct lookup: find Wikidata items whose P402 (OSM relation ID) matches
|
||||
# one of the OSM polygons that contain this point.
|
||||
osm_id_to_element: dict[int, model.Polygon] = {}
|
||||
relation_ids_for_lookup: list[int] = []
|
||||
for e in elements:
|
||||
if e.osm_id < 0:
|
||||
rel_id = abs(e.osm_id)
|
||||
relation_ids_for_lookup.append(rel_id)
|
||||
osm_id_to_element[e.osm_id] = e
|
||||
|
||||
row = wikidata.geosearch(lat, lon)
|
||||
if relation_ids_for_lookup:
|
||||
lookup_rows = wikidata.lookup_wikidata_by_osm_relation_ids(
|
||||
relation_ids_for_lookup
|
||||
)
|
||||
rel_to_hit: dict[int, wikidata.Hit] = {}
|
||||
for row in lookup_rows:
|
||||
rel_id = int(row["osmRelation"]["value"])
|
||||
if rel_id not in rel_to_hit:
|
||||
rel_to_hit[rel_id] = wikidata.hit_from_row(row)
|
||||
|
||||
# Iterate elements in specificity order (smallest area first, from coords_within).
|
||||
# Only use hits without a Commons category — if there's a Commons cat,
|
||||
# the geosearch path will find it (or something more specific).
|
||||
for e in elements:
|
||||
if e.osm_id >= 0:
|
||||
continue
|
||||
rel_id = abs(e.osm_id)
|
||||
if rel_id not in rel_to_hit:
|
||||
continue
|
||||
hit = rel_to_hit[rel_id]
|
||||
if hit.get("commons_cat"):
|
||||
continue
|
||||
if e.tags:
|
||||
hit["admin_level"] = get_admin_level(e.tags)
|
||||
hit["element"] = e.osm_id
|
||||
hit["geojson"] = typing.cast(str, e.geojson_str)
|
||||
result = wikidata.build_dict(hit, lat, lon)
|
||||
return {"elements": elements, "result": result}
|
||||
|
||||
query = wikidata.geosearch_query(lat, lon)
|
||||
geo_rows = wikidata.wdqs(query)
|
||||
row = wikidata.filter_geosearch_row(geo_rows)
|
||||
if row:
|
||||
hit = wikidata.commons_from_rows([row])
|
||||
elements = []
|
||||
result = wikidata.build_dict(hit, lat, lon)
|
||||
|
||||
query = wikidata.geosearch_query(lat, lon)
|
||||
|
||||
return {"elements": elements, "result": result, "query": query}
|
||||
|
||||
|
||||
|
|
@@ -343,7 +367,8 @@ def index() -> str | Response:
|
|||
if error_msg := validate_coordinates(lat, lon):
|
||||
return jsonify(coords={"lat": lat, "lon": lon}, error=error_msg)
|
||||
|
||||
result = lat_lon_to_wikidata(lat, lon)["result"]
|
||||
needs_commons = request.args.get("needs_commons", "true").lower() != "false"
|
||||
result = lat_lon_to_wikidata(lat, lon, needs_commons=needs_commons)["result"]
|
||||
result.pop("element", None)
|
||||
result.pop("geojson", None)
|
||||
if logging_enabled:
|
||||
|
|
@@ -401,7 +426,7 @@ def highlight_sparql(query: str) -> str:
|
|||
app.jinja_env.filters["highlight_sparql"] = highlight_sparql
|
||||
|
||||
|
||||
def build_detail_page(lat: float, lon: float) -> str:
|
||||
def build_detail_page(lat: float, lon: float, needs_commons: bool = True) -> str:
|
||||
"""Run lookup and build detail page."""
|
||||
if lat < -90 or lat > 90 or lon < -180 or lon > 180:
|
||||
error = (
|
||||
|
|
@@ -411,7 +436,7 @@ def build_detail_page(lat: float, lon: float) -> str:
|
|||
return render_template("query_error.html", lat=lat, lon=lon, error=error)
|
||||
|
||||
try:
|
||||
reply = lat_lon_to_wikidata(lat, lon)
|
||||
reply = lat_lon_to_wikidata(lat, lon, needs_commons=needs_commons)
|
||||
except wikidata.QueryError as e:
|
||||
query, r = e.args
|
||||
return render_template("query_error.html", lat=lat, lon=lon, query=query, r=r)
|
||||
|
|
@@ -429,6 +454,7 @@ def build_detail_page(lat: float, lon: float) -> str:
|
|||
element_id=element,
|
||||
geojson=geojson,
|
||||
css=css,
|
||||
needs_commons=needs_commons,
|
||||
**reply,
|
||||
)
|
||||
|
||||
|
|
@@ -453,7 +479,8 @@ def detail_page() -> Response | str:
|
|||
"coordinate_error.html", lat_str=lat_str, lon_str=lon_str, error=error
|
||||
)
|
||||
|
||||
return build_detail_page(lat, lon)
|
||||
needs_commons = request.args.get("needs_commons", "true").lower() != "false"
|
||||
return build_detail_page(lat, lon, needs_commons=needs_commons)
|
||||
|
||||
|
||||
@app.route("/reports")
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue