geocode/lookup.py

523 lines
14 KiB
Python
Raw Normal View History

2019-08-18 15:56:53 +01:00
#!/usr/bin/python3
2021-04-15 12:28:02 +01:00
from flask import Flask, render_template, request, jsonify, redirect, url_for
2019-08-18 15:56:53 +01:00
import requests
import os
import json
import urllib.parse
import random
2021-04-15 12:28:02 +01:00
import simplejson
import psycopg2
from geopy.distance import distance
# select gid, code, name from scotland where st_contains(geom, ST_Transform(ST_SetSRID(ST_MakePoint(-4.177, 55.7644), 4326), 27700));
2019-08-18 15:56:53 +01:00
2021-04-16 16:21:38 +01:00
# Base URL of Wikimedia Commons category pages.
commons_cat_start = "https://commons.wikimedia.org/wiki/Category:"

# When True, Overpass responses are cached on disk (see get_osm_elements).
use_cache = False

# Identify this client to remote APIs (Wikimedia / Overpass etiquette).
headers = {
    "User-Agent": "UK gecode/0.1 (edward@4angle.com)",
}
OVERPASS_URL = "https://lz4.overpass-api.de"
wikidata_query_api_url = "https://query.wikidata.org/bigdata/namespace/wdq/sparql"
# Prefix of Wikidata entity URIs, including the leading "Q" of the QID
# (see wd_uri_to_qid, which slices it off with a -1 adjustment).
wd_entity = "http://www.wikidata.org/entity/Q"
city_of_london_qid = "Q23311"
2019-08-18 15:56:53 +01:00
# Sample locations shown on the index page: (lat, lon, label).
samples = [
    (50.8326, -0.2689, "Adur"),
    (52.4914, -0.69645, "Corby"),
    (50.893, -4.265, "Newton St Petrock"),
    (51.779, 0.128, "Harlow"),
    (52.387, 0.294, "Ely"),
    (50.9, -1.6, "Minstead"),
    (52.43, -1.11, "South Kilworth"),
    (53.117, -0.202, "Tattershall Thorpe"),
    (53.351, -2.701, "Halton"),
    (52.421, -0.651, "Warkton"),
    (51.51, -1.547, "Lambourn"),
    (52.62, -1.884, "Shenstone"),
    (53.309, -1.539, "Sheffield"),
    (53.322, 0.032, "Haugham"),
    (51.05, -2.598, "Babcary"),
    (51.158, -1.906, "Berwick St James"),
    (51.867, -1.204, "Weston-on-the-Green"),
    (51.034, -2.005, "Ebbesbourne Wake"),
    (51.07, -0.718, "Fernhurst"),
    (53.059, -0.144, "Wildmore"),
    (51.473, 0.221, "Dartford"),
    (51.059, 0.05, "Danehill"),
    (52.253, -0.122, "Papworth Everard"),
    (53.498, -0.415, "West Lindsey"),
    (53.392, -0.022, "Brackenborough with Little Grimsby"),
    (53.463, -0.027, "Fulstow"),
    (52.766, 0.31, "Terrington St Clement"),
    (53.1540, -1.8034, "Hartington Town Quarter"),
    (51.8532, -0.8829, "Fleet Marston"),
    (51.4785, -0.354, "London Borough of Hounslow"),
    (51.9687, -0.0327, "Buckland, Hertfordshire"),
    (51.0804, -2.3263, "Zeals"),
    (55.7644, -4.1770, "East Kilbride"),
    (51.4520, -2.6210, "Bristol"),
]
2021-04-16 16:22:39 +01:00
2021-04-15 12:25:14 +01:00
class QueryError(Exception):
    """Raised when a WDQS request returns a response that is not valid JSON."""

    def __init__(self, query, r):
        # Keep the SPARQL query text and the raw response object so callers
        # (see do_lookup) can report what failed.
        self.query = query
        self.r = r
app = Flask(__name__)
app.debug = True  # NOTE(review): debug mode must not be enabled in production
2021-04-16 16:14:12 +01:00
def get_random_lat_lon():
    """Pick a pseudo-random lat/lon inside a fixed bounding box.

    The box covers part of England; the docstring in the original claimed
    "within the UK", but the corners below exclude Scotland, Wales and NI.
    """
    south, east = 50.8520, 0.3536
    north, west = 53.7984, -2.7296
    precision = 10000  # four decimal places

    lat = random.randrange(int(south * precision), int(north * precision)) / precision
    lon = random.randrange(int(west * precision), int(east * precision)) / precision
    return lat, lon
@app.route("/random")
def random_location():
    """Render the lookup result for a randomly chosen location."""
    lat, lon = get_random_lat_lon()

    osm_elements = get_osm_elements(lat, lon)
    lookup_result = do_lookup(osm_elements, lat, lon)
    return render_template(
        "random.html", lat=lat, lon=lon, result=lookup_result, elements=osm_elements
    )
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:12 +01:00
2021-04-15 12:28:02 +01:00
@app.route("/wikidata_tag")
def wikidata_tag():
    """Render the Wikidata/Commons match for lat/lon query parameters.

    Scottish locations are resolved via the local parish table; everywhere
    else falls back to the Overpass/Wikidata lookup.
    """
    lat = float(request.args.get("lat"))
    lon = float(request.args.get("lon"))
    scotland_code = get_scotland_code(lat, lon)
    if scotland_code:
        rows = lookup_scottish_parish_in_wikidata(scotland_code)
        hit = commons_from_rows(rows)
        elements = []
        result = build_dict(hit, lat, lon)
    else:
        elements = get_osm_elements(lat, lon)
        result = do_lookup(elements, lat, lon)
    return render_template(
        "wikidata_tag.html", lat=lat, lon=lon, result=result, elements=elements
    )
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
@app.route("/detail")
def detail_page():
    """Detail page for a lat/lon pair given as query parameters."""
    try:
        lat, lon = [float(request.args.get(param)) for param in ("lat", "lon")]
    except (TypeError, ValueError):
        # fix: a missing parameter raises TypeError, a non-numeric one
        # raises ValueError — previously the latter produced a 500 error.
        return redirect(url_for("index"))

    reply = lat_lon_to_wikidata(lat, lon)
    return render_template("random.html", lat=lat, lon=lon, **reply)
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def bounding_box_area(element):
    """Approximate area in km² of an Overpass element's bounding box."""
    bbox = element["bounds"]

    # One edge along the top of the box; the other span is measured between
    # opposite corners, matching the original implementation.
    width = distance((bbox["maxlat"], bbox["minlon"]), (bbox["maxlat"], bbox["maxlon"]))
    height = distance((bbox["minlat"], bbox["maxlon"]), (bbox["maxlat"], bbox["minlon"]))

    return width.km * height.km
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def wd_to_qid(wd):
    """Extract a QID from a WDQS JSON binding for an item.

    Expects e.g. {"type": "uri", "value": "http://www.wikidata.org/entity/Q30"}
    (the original comment showed "url" / a wiki page URL, but the code checks
    for the SPARQL "uri" binding type and an entity URI).
    Returns None for non-URI bindings.
    """
    if wd["type"] == "uri":
        return wd_uri_to_qid(wd["value"])
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def wd_uri_to_qid(value):
    """Return the trailing QID (with its "Q") of a Wikidata entity URI."""
    assert value.startswith(wd_entity)
    # wd_entity itself ends with "Q", so step back one character to keep it.
    return value[len(wd_entity) - 1:]
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def build_dict(hit, lat, lon):
    """Build the JSON-serializable reply for a lookup hit (or a miss)."""
    coords = {"lat": lat, "lon": lon}
    if hit is None:
        return {"commons_cat": None, "missing": True, "coords": coords}

    commons_cat = hit["commons_cat"]
    url = commons_cat_start + urllib.parse.quote(commons_cat.replace(" ", "_"))
    return {
        "commons_cat": {"title": commons_cat, "url": url},
        "coords": coords,
        "admin_level": hit.get("admin_level"),
        "wikidata": hit["wikidata"],
    }
2021-04-15 12:28:02 +01:00
def do_lookup(elements, lat, lon):
    """Run the OSM-based lookup; a failed SPARQL query yields an error dict."""
    try:
        hit = osm_lookup(elements, lat, lon)
    except QueryError as e:
        return {
            "query": e.query,
            "error": e.r.text,
            "query_url": "https://query.wikidata.org/#" + e.query,
        }
    else:
        return build_dict(hit, lat, lon)
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def get_scotland_code(lat, lon):
    """Return the code of the Scottish area containing (lat, lon), or None.

    Checks the local `scotland` PostGIS table; when no polygon contains the
    point exactly, widens the search to within 100m.
    """
    conn = psycopg2.connect(
        dbname="geocode", user="geocode", password="ooK3ohgh", host="localhost"
    )  # NOTE(review): credentials belong in config, not source code
    try:
        cur = conn.cursor()
        # fix: bind lat/lon as query parameters — index() can pass raw
        # request strings down here, so f-string interpolation was a SQL
        # injection vector.
        point = "ST_Transform(ST_SetSRID(ST_MakePoint(%s, %s), 4326), 27700)"
        cur.execute(
            f"select code, name from scotland where st_contains(geom, {point});",
            (lon, lat),
        )
        row = cur.fetchone()
        # expand search, disabled for now 2020-04-20
        if not row:
            cur.execute(
                f"select code, name from scotland where ST_DWithin(geom, {point}, 100);",
                (lon, lat),
            )
            row = cur.fetchone()
    finally:
        # fix: close the connection even when a query raises
        conn.close()
    if row:
        return row[0]
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def wdqs_geosearch_query(lat, lon):
    """Ask WDQS for human settlements (Q486972 subclasses) within 5km.

    Returns the raw SPARQL JSON result bindings, ordered by distance
    from the point. Raises QueryError (via wdqs) on a bad response.
    """
    # The WKT point literal needs plain decimal strings, not repr()-style
    # floats (which may use scientific notation).
    if isinstance(lat, float):
        lat = f"{lat:f}"
    if isinstance(lon, float):
        lon = f"{lon:f}"

    query_template = """
SELECT DISTINCT ?item ?distance ?itemLabel ?isa ?isaLabel ?commonsCat ?commonsSiteLink WHERE {
{
SELECT DISTINCT ?item ?location ?distance ?isa WHERE {
?item wdt:P31/wdt:P279* wd:Q486972.
?item wdt:P31 ?isa .
SERVICE wikibase:around {
?item wdt:P625 ?location.
bd:serviceParam wikibase:center "Point(LON LAT)"^^geo:wktLiteral;
wikibase:radius 5;
wikibase:distance ?distance.
}
}
}
MINUS { ?item wdt:P582 ?endTime . }
OPTIONAL { ?item wdt:P373 ?commonsCat. }
OPTIONAL { ?commonsSiteLink schema:about ?item;
schema:isPartOf <https://commons.wikimedia.org/>. }
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
} ORDER BY (?distance)"""

    # Plain placeholder substitution; str.format would clash with the braces
    # used by SPARQL itself.
    query = query_template.replace("LAT", lat).replace("LON", lon)
    reply = wdqs(query)
    return reply["results"]["bindings"]
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def wdqs_geosearch(lat, lon):
    """Pick a suitable Wikidata settlement row near (lat, lon).

    Iterates the distance-ordered WDQS rows, skipping rows that are farther
    away than allowed for their settlement type. Returns a single result
    row, or None when WDQS returns nothing.
    """
    default_max_dist = 1  # km, for types not listed in max_dist
    rows = wdqs_geosearch_query(lat, lon)
    # Maximum acceptable distance (km) per settlement type QID.
    max_dist = {
        "Q188509": 1,  # suburb
        "Q3957": 2,  # town
        "Q532": 1,  # village
        "Q5084": 1,  # hamlet
        "Q515": 2,  # city
        "Q1549591": 3,  # big city
    }
    row = None  # fix: `return row` raised NameError when rows was empty
    for row in rows:
        isa = wd_uri_to_qid(row["isa"]["value"])

        if (
            "commonsCat" not in row
            and "commonsSiteLink" not in row
            and isa not in max_dist
        ):
            continue
        distance = float(row["distance"]["value"])
        if distance > max_dist.get(isa, default_max_dist):
            continue
        if "commonsCat" not in row and "commonsSiteLink" not in row:
            # Nearby settlement with no Commons link: stop searching.
            break
    # NOTE(review): when the loop finishes without breaking, this returns the
    # last row iterated even if it was skipped by a `continue` above —
    # confirm that is the intended behavior.
    return row
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def lat_lon_to_wikidata(lat, lon):
    """Resolve (lat, lon) to {"elements": [...], "result": {...}}.

    Order of precedence: Scottish parish table, then OSM boundary lookup,
    then — when the OSM result is a coarse admin area — a WDQS geosearch
    refinement to a nearby settlement.
    """
    scotland_code = get_scotland_code(lat, lon)
    if scotland_code:
        rows = lookup_scottish_parish_in_wikidata(scotland_code)
        hit = commons_from_rows(rows)
        elements = []
        result = build_dict(hit, lat, lon)
        return {"elements": elements, "result": result}
    elements = get_osm_elements(lat, lon)
    result = do_lookup(elements, lat, lon)
    # special case because the City of London is admin_level=6 in OSM
    # NOTE(review): when the lookup missed or errored, `result` has no
    # "wikidata" key and this line raises KeyError — confirm callers cope.
    if result["wikidata"] == city_of_london_qid:
        return {"elements": elements, "result": result}

    admin_level = result["admin_level"]
    if not admin_level or admin_level >= 7:
        return {"elements": elements, "result": result}
    # The OSM hit is a coarse admin area: try to refine via WDQS geosearch.
    row = wdqs_geosearch(lat, lon)
    if row:
        hit = commons_from_rows([row])
        elements = []
        result = build_dict(hit, lat, lon)
    return {"elements": elements, "result": result}
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
@app.route("/")
def index():
    """Index page: sample list, `q=lat,lon` search redirect, or JSON lookup."""
    q = request.args.get("q")
    if q and q.strip():
        try:
            lat, lon = [part.strip() for part in q.split(",", 1)]
        except ValueError:
            # fix: a q value with no comma previously crashed with a 500;
            # fall back to the index page instead.
            return render_template("index.html", samples=samples)
        return redirect(url_for("detail_page", lat=lat, lon=lon))

    lat = request.args.get("lat")
    lon = request.args.get("lon")
    if lat is None or lon is None:
        samples.sort(key=lambda row: row[2])
        return render_template("index.html", samples=samples)

    # NOTE(review): lat/lon are passed downstream as raw strings here —
    # confirm lat_lon_to_wikidata handles unvalidated input.
    return jsonify(lat_lon_to_wikidata(lat, lon)["result"])
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def wikidata_api_call(params):
    """Call the Wikidata web API with JSON output and return the decoded reply."""
    base_params = {"format": "json", "formatversion": 2}
    r = requests.get(
        "https://www.wikidata.org/w/api.php",
        params={**base_params, **params},
        headers=headers,
    )
    return r.json()
2019-08-18 15:56:53 +01:00
def get_entity(qid):
    """Fetch one entity from Wikidata; None when absent or the call failed."""
    json_data = wikidata_api_call({"action": "wbgetentities", "ids": qid})
    try:
        entity = list(json_data["entities"].values())[0]
    except KeyError:
        # No "entities" key: the API call failed outright.
        return None
    if "missing" in entity:
        return None
    return entity
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def qid_to_commons_category(qid):
    """Return the Commons category (P373 claim value) for a QID, or None."""
    entity = get_entity(qid)
    try:
        commons_cat = entity["claims"]["P373"][0]["mainsnak"]["datavalue"]["value"]
    except (TypeError, KeyError, IndexError):
        # fix: narrowed from a bare `except Exception`. TypeError covers
        # entity being None; KeyError/IndexError cover an absent or
        # malformed claim.
        commons_cat = None
    return commons_cat
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def wdqs(query):
    """POST a SPARQL query to WDQS and return the decoded JSON reply.

    Raises QueryError when the response body is not valid JSON (e.g. an
    HTML error page from the endpoint).
    """
    r = requests.post(
        wikidata_query_api_url, data={"query": query, "format": "json"}, headers=headers
    )

    try:
        return r.json()
    except ValueError as e:
        # fix: JSONDecodeError subclasses ValueError in both json and
        # simplejson, so this catches the decode failure whichever decoder
        # requests is using (the old code only caught simplejson's).
        raise QueryError(query, r) from e
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def run_query(oql, error_on_rate_limit=True):
    """POST an Overpass QL query and return the raw response.

    error_on_rate_limit is accepted for compatibility but currently unused.
    """
    url = OVERPASS_URL + "/api/interpreter"
    return requests.post(url, data=oql.encode("utf-8"), headers=headers)
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def get_elements(oql):
    """Run an Overpass query and return the decoded "elements" list."""
    response = run_query(oql)
    return response.json()["elements"]
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def is_in_lat_lon(lat, lon):
    """Ask Overpass which ways/relations contain the point (with bboxes and tags)."""
    oql = f"""
[out:json][timeout:25];
is_in({lat},{lon})->.a;
(way(pivot.a); rel(pivot.a););
out bb tags qt;"""
    return run_query(oql)
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def lookup_scottish_parish_in_wikidata(code):
    """Find the Wikidata civil parish of Scotland (Q5124673) with this code (P528)."""
    query_template = """
SELECT ?item ?itemLabel ?commonsSiteLink ?commonsCat WHERE {
?item wdt:P528 "CODE" .
?item wdt:P31 wd:Q5124673 .
OPTIONAL { ?commonsSiteLink schema:about ?item ;
schema:isPartOf <https://commons.wikimedia.org/> }
OPTIONAL { ?item wdt:P373 ?commonsCat }
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
"""
    query = query_template.replace("CODE", code)
    return wdqs(query)["results"]["bindings"]
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def lookup_gss_in_wikidata(gss):
    """Find the Wikidata item carrying this GSS area code (P836)."""
    query_template = """
SELECT ?item ?itemLabel ?commonsSiteLink ?commonsCat WHERE {
?item wdt:P836 GSS .
OPTIONAL { ?commonsSiteLink schema:about ?item ;
schema:isPartOf <https://commons.wikimedia.org/> }
OPTIONAL { ?item wdt:P373 ?commonsCat }
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
"""
    # repr() wraps the code in quotes, turning it into a SPARQL string literal.
    query = query_template.replace("GSS", repr(gss))
    return wdqs(query)["results"]["bindings"]
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def lookup_wikidata_by_name(name, lat, lon):
    """Find Wikidata items labelled `name` (en) within ~10km of the point.

    Only items with a Commons category or Commons sitelink are returned.
    """
    # NOTE(review): repr(name) relies on Python string quoting also being
    # valid SPARQL; a name containing both quote characters would produce a
    # malformed query — confirm inputs are constrained.
    query = (
        """
SELECT DISTINCT ?item ?itemLabel ?commonsSiteLink ?commonsCat WHERE {
?item rdfs:label LABEL@en .
FILTER NOT EXISTS { ?item wdt:P31 wd:Q17362920 } .# ignore Wikimedia duplicated page
OPTIONAL { ?commonsSiteLink schema:about ?item ;
schema:isPartOf <https://commons.wikimedia.org/> }
OPTIONAL { ?item wdt:P373 ?commonsCat }
?item wdt:P625 ?coords .
FILTER(geof:distance(?coords, "Point(LON LAT)"^^geo:wktLiteral) < 10)
FILTER(?commonsCat || ?commonsSiteLink)
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
""".replace(
            "LABEL", repr(name)
        )
        .replace("LAT", str(lat))
        .replace("LON", str(lon))
    )

    reply = wdqs(query)
    return reply["results"]["bindings"]
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def unescape_title(t):
    """Turn a percent-encoded, underscore-separated wiki title into plain text."""
    spaced = t.replace("_", " ")
    return urllib.parse.unquote(spaced)
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def commons_from_rows(rows):
    """Return {"wikidata", "commons_cat"} from the first usable WDQS row.

    Prefers an explicit P373 category, falling back to the Commons sitelink.
    Returns None when no row carries either.
    """
    for row in rows:
        if "commonsCat" in row:
            return {
                "wikidata": wd_to_qid(row["item"]),
                "commons_cat": row["commonsCat"]["value"],
            }
        if "commonsSiteLink" in row:
            link = row["commonsSiteLink"]["value"]
            return {
                "wikidata": wd_to_qid(row["item"]),
                "commons_cat": unescape_title(link[len(commons_cat_start):]),
            }
    return None
2021-04-15 12:28:02 +01:00
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def get_commons_cat_from_gss(gss):
    """Resolve a GSS area code to its Commons category via Wikidata."""
    rows = lookup_gss_in_wikidata(gss)
    return commons_from_rows(rows)
2019-08-18 15:56:53 +01:00
2021-04-16 16:14:32 +01:00
2019-08-18 15:56:53 +01:00
def get_osm_elements(lat, lon):
    """Overpass elements containing (lat, lon), optionally cached on disk."""
    filename = f"cache/{lat}_{lon}.json"
    if use_cache and os.path.exists(filename):
        # fix: use context managers so the file handles are closed promptly
        # instead of leaking until garbage collection.
        with open(filename) as f:
            return json.load(f)["elements"]

    r = is_in_lat_lon(lat, lon)
    if use_cache:
        with open(filename, "wb") as f:
            f.write(r.content)
    return r.json()["elements"]
2021-04-16 16:14:32 +01:00
2021-04-15 12:28:02 +01:00
def osm_lookup(elements, lat, lon):
    """Map OSM boundary elements to a Wikidata hit, smallest area first.

    For each admin/political boundary, tries in order: the element's own
    wikidata tag, its GSS code (ref:gss), then a name-based Wikidata search.
    Falls back to a lone wikidata-tagged element of any kind.
    Returns {"wikidata", "commons_cat", "admin_level"} or None.
    """
    elements.sort(key=lambda e: bounding_box_area(e))
    # fix: initialize so the fallback return below cannot raise NameError
    # when no element enters the loop body.
    admin_level = None
    for e in elements:
        if "tags" not in e:
            continue
        tags = e["tags"]
        admin_level_tag = tags.get("admin_level")
        admin_level = (
            int(admin_level_tag)
            if admin_level_tag and admin_level_tag.isdigit()
            else None
        )
        if not admin_level and tags.get("boundary") != "political":
            continue
        if "wikidata" in tags:
            qid = tags["wikidata"]
            commons = qid_to_commons_category(qid)
            if commons:
                return {
                    "wikidata": qid,
                    "commons_cat": commons,
                    "admin_level": admin_level,
                }
        gss = tags.get("ref:gss")
        if gss:
            ret = get_commons_cat_from_gss(gss)
            if ret:
                ret["admin_level"] = admin_level
                return ret
        name = tags.get("name")
        if not name:
            continue
        if name.endswith(" CP"):  # drop the civil-parish suffix
            name = name[:-3]
        rows = lookup_wikidata_by_name(name, lat, lon)
        if len(rows) == 1:
            ret = commons_from_rows(rows)
            if ret:
                ret["admin_level"] = admin_level
                return ret

    # Fallback: exactly one element anywhere in the list has a wikidata tag.
    # fix: guard for elements without "tags" — the old comprehension indexed
    # e["tags"] unconditionally and raised KeyError for untagged elements.
    has_wikidata_tag = [
        e["tags"] for e in elements if "tags" in e and "wikidata" in e["tags"]
    ]
    if len(has_wikidata_tag) != 1:
        return None
    qid = has_wikidata_tag[0]["wikidata"]
    return {
        "wikidata": qid,
        "commons_cat": qid_to_commons_category(qid),
        "admin_level": admin_level,
    }
2019-08-18 15:56:53 +01:00
2021-04-16 16:21:38 +01:00
if __name__ == "__main__":
    # Listen on all interfaces; debug mode is enabled at module level above.
    app.run(host="0.0.0.0")