2019-09-12 19:51:05 +01:00
|
|
|
#!/usr/bin/python3
|
|
|
|
|
2019-09-27 11:02:24 +01:00
|
|
|
from flask import Flask, render_template, url_for, redirect, request, g, jsonify, session
|
2019-10-10 10:58:42 +01:00
|
|
|
from depicts import (utils, wdqs, commons, mediawiki, painting, database,
|
|
|
|
wd_catalog, human, wikibase, wikidata_oauth, wikidata_edit)
|
2019-09-29 20:19:40 +01:00
|
|
|
from depicts.pager import Pagination, init_pager
|
2019-09-30 10:19:12 +01:00
|
|
|
from depicts.model import (DepictsItem, DepictsItemAltLabel, Edit, PaintingItem,
|
|
|
|
Language)
|
2019-09-29 21:14:41 +01:00
|
|
|
from depicts.error_mail import setup_error_mail
|
2019-09-27 11:02:24 +01:00
|
|
|
from requests_oauthlib import OAuth1Session
|
2019-09-29 08:27:35 +01:00
|
|
|
from werkzeug.exceptions import InternalServerError
|
|
|
|
from werkzeug.debug.tbtools import get_current_traceback
|
2019-09-29 11:23:07 +01:00
|
|
|
from sqlalchemy import func, distinct
|
2019-09-30 10:19:12 +01:00
|
|
|
from collections import defaultdict
|
2019-09-12 19:51:05 +01:00
|
|
|
import json
|
|
|
|
import os
|
2019-09-14 13:26:16 +01:00
|
|
|
import locale
|
2019-09-16 08:59:53 +01:00
|
|
|
import random
|
2019-09-14 13:26:16 +01:00
|
|
|
|
|
|
|
# Templates rely on US-English number/date formatting.
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')

# Browser-style User-Agent string for outgoing HTTP requests.
# NOTE(review): defined here but not referenced in this module --
# presumably imported elsewhere; confirm before removing.
user_agent = 'Mozilla/5.0 (X11; Linux i586; rv:32.0) Gecko/20160101 Firefox/32.0'

app = Flask(__name__)
app.config.from_object('config.default')

# Wire up the database, pagination helper and error e-mail reporting.
database.init_db(app.config['DB_URL'])
init_pager(app)
setup_error_mail(app)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
|
|
|
# "Find more" facet properties: Wikidata property id -> English display label.
# Drives facet queries, browse-page filters and template labels throughout
# this module.
find_more_props = {
    'P135': 'movement',
    'P136': 'genre',
    'P170': 'artist',
    'P195': 'collection',
    'P276': 'location',
    'P495': 'country of origin',
    'P127': 'owned by',
    'P179': 'part of the series',
    'P921': 'main subject',
    'P186': 'material used',
    'P88': 'commissioned by',
    'P1028': 'donated by',
    'P1071': 'location of final assembly',
    'P138': 'named after',
    'P1433': 'published in',
    'P144': 'based on',
    'P2079': 'fabrication method',
    'P2348': 'time period',
    'P361': 'part of',
    'P608': 'exhibition history',

    # possible future props
    # 'P571': 'inception',
    # 'P166': 'award received', (only 2)
    # 'P1419': 'shape', (only 2)
    # 'P123': 'publisher', (only 1)
}
|
|
|
|
|
2019-09-30 10:19:12 +01:00
|
|
|
@app.teardown_appcontext
def shutdown_session(exception=None):
    '''Return the scoped SQLAlchemy session to the pool after each request.'''
    database.session.remove()
|
|
|
|
|
2019-09-29 08:27:35 +01:00
|
|
|
@app.errorhandler(InternalServerError)
def exception_handler(e):
    '''Render a custom 500 page that shows the current traceback.'''
    # NOTE(review): get_current_traceback is a private werkzeug API that was
    # removed in werkzeug 1.0 -- pin werkzeug or replace this if upgrading.
    tb = get_current_traceback()
    return render_template('show_error.html', tb=tb), 500
|
|
|
|
|
2019-09-16 08:59:53 +01:00
|
|
|
@app.template_global()
def set_url_args(**new_args):
    '''Rebuild the current request URL with some arguments replaced.

    View args, query-string args and *new_args* are merged (later wins);
    any argument whose value is None is dropped from the result.
    '''
    merged = {**request.view_args, **request.args, **new_args}
    kept = {key: value for key, value in merged.items() if value is not None}
    return url_for(request.endpoint, **kept)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-09-27 20:23:01 +01:00
|
|
|
@app.template_global()
def current_url():
    '''Reconstruct the URL of the request being handled, query args included.'''
    combined = {**request.view_args, **request.args}
    return url_for(request.endpoint, **combined)
|
|
|
|
|
2019-09-16 08:59:53 +01:00
|
|
|
@app.before_request
def init_profile():
    # Fresh per-request list; profiling entries are appended elsewhere.
    g.profiling = []
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-10-04 12:16:16 +01:00
|
|
|
@app.route('/user/settings')
def user_settings():
    '''Toggle the session's "find more" preference and report the new state.'''
    hide_find_more = not session.get('no_find_more')
    session['no_find_more'] = hide_find_more
    display = 'off' if hide_find_more else 'on'
    return 'flipped. find more is ' + display
|
|
|
|
|
2019-10-04 16:56:06 +01:00
|
|
|
def no_existing_edit(item_id, depicts_id):
    '''Return True when this painting/depicts pair has not been saved before.'''
    existing = Edit.query.filter_by(painting_id=item_id,
                                    depicts_id=depicts_id).count()
    return existing == 0
|
|
|
|
|
2019-09-27 16:07:37 +01:00
|
|
|
@app.route('/save/Q<int:item_id>', methods=['POST'])
def save(item_id):
    '''Add the submitted depicts (P180) statements to painting Q<item_id>.

    Creates the claims on Wikidata via OAuth, records one Edit row per
    statement, then redirects to the "next" page for the painting.
    '''
    # QIDs submitted from the item page form, e.g. ['Q42', 'Q123'].
    depicts = request.form.getlist('depicts')
    username = wikidata_oauth.get_username()
    # NOTE(review): assert is stripped under -O; an explicit abort(401)
    # would be safer for enforcing login.
    assert username

    token = wikidata_oauth.get_token()

    # Make sure the painting itself is recorded locally before edits
    # reference it.
    painting_item = PaintingItem.query.get(item_id)
    if painting_item is None:
        painting_entity = mediawiki.get_entity_with_cache(f'Q{item_id}')
        label = wikibase.get_entity_label(painting_entity)
        painting_item = PaintingItem(item_id=item_id, label=label, entity=painting_entity)
        database.session.add(painting_item)
        database.session.commit()

    # First pass: ensure every DepictsItem row exists and none of the
    # edits has already been saved, before touching Wikidata.
    for depicts_qid in depicts:
        depicts_id = int(depicts_qid[1:])

        depicts_item = DepictsItem.query.get(depicts_id)
        if depicts_item is None:
            depicts_item = wikidata_edit.create_depicts_item(depicts_id)
            database.session.add(depicts_item)
            database.session.commit()

        assert no_existing_edit(item_id, depicts_id)

    # Second pass: perform the Wikidata edits and record them locally.
    for depicts_qid in depicts:
        depicts_id = int(depicts_qid[1:])
        r = create_claim(item_id, depicts_id, token)
        reply = r.json()
        if 'error' in reply:
            # Bail out on the first API error; earlier claims in this loop
            # have already been saved to Wikidata.
            return 'error:' + r.text
        print(r.text)
        saved = r.json()
        lastrevid = saved['pageinfo']['lastrevid']
        assert saved['success'] == 1
        edit = Edit(username=username,
                    painting_id=item_id,
                    depicts_id=depicts_id,
                    lastrevid=lastrevid)
        database.session.add(edit)
        database.session.commit()

    return redirect(url_for('next_page', item_id=item_id))
|
2019-09-27 16:07:37 +01:00
|
|
|
|
2019-09-13 17:16:16 +01:00
|
|
|
@app.route("/property/P<int:property_id>")
def property_query_page(property_id):
    '''List the values of one facet property across all paintings.

    Rows come from a SPARQL query; ?sort=name orders by label using the
    current locale, otherwise rows keep query (count) order.
    '''
    pid = f'P{property_id}'
    sort = request.args.get('sort')
    sort_by_name = sort and sort.lower().strip() == 'name'

    q = render_template('query/property.sparql', pid=pid)
    rows = wdqs.run_query_with_cache(q, name=pid)

    # QIDs whose rows came back without an English label from WDQS.
    no_label_qid = [row['object']['value'].rpartition('/')[2]
                    for row in rows
                    if 'objectLabel' not in row and '/' in row['object']['value']]

    # Backfill missing labels with a separate (cached) API lookup.
    if no_label_qid:
        extra_label = get_labels(no_label_qid, name=f'{pid}_extra_labels')
        if extra_label:
            for row in rows:
                item = row['object']['value']
                if 'objectLabel' in row or '/' not in item:
                    continue
                qid = item.rpartition('/')[2]
                if extra_label.get(qid):
                    row['objectLabel'] = {'value': extra_label[qid]}

    if sort_by_name:
        # put rows with no English label at the end
        no_label = [row for row in rows if 'objectLabel' not in row]
        has_label = sorted((row for row in rows if 'objectLabel' in row),
                           key=lambda row: locale.strxfrm(row['objectLabel']['value']))
        rows = has_label + no_label
    label = find_more_props[pid]

    return render_template('property.html',
                           label=label,
                           order=('name' if sort_by_name else 'count'),
                           pid=pid,
                           rows=rows)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-09-27 14:13:28 +01:00
|
|
|
@app.route('/')
def start():
    '''Landing page: send the visitor straight to a random painting.

    The original function contained unreachable code after this return
    (a username lookup immediately overwritten by None, then a
    render of start.html); that dead code has been removed.
    '''
    return random_painting()
|
2019-09-27 14:13:28 +01:00
|
|
|
|
|
|
|
@app.route('/next')
def random_painting():
    '''Pick a random painting with no depicts statement and redirect to it.'''
    q = render_template('query/painting_no_depicts.sparql')
    rows = wdqs.run_query_with_cache(q)
    # Keep sampling until we find a painting that really has no P180.
    # NOTE(review): loops forever if every candidate is rejected.
    has_depicts = True
    while has_depicts:
        item_id = wdqs.row_id(random.choice(rows))
        # Skip paintings already recorded locally (likely already handled).
        if PaintingItem.query.get(item_id):
            continue
        # refresh=True: the cached WDQS result may be stale, so re-check
        # the live entity.
        entity = mediawiki.get_entity_with_cache(f'Q{item_id}', refresh=True)
        en_label = wikibase.get_en_label(entity)
        if en_label and en_label.startswith('Page from '):
            # example: Q60467422
            # title: Page from Tales of a Parrot (Tuti-nama): text page
            # this is not a painting
            continue
        has_depicts = 'P180' in entity['claims']

    # Flag the redirect so item_page can skip a redundant refresh.
    session[f'Q{item_id}'] = 'from redirect'
    return redirect(url_for('item_page', item_id=item_id))
|
|
|
|
|
2019-09-27 11:02:24 +01:00
|
|
|
@app.route('/oauth/start')
def start_oauth():
    '''Begin the Wikidata OAuth 1.0a handshake and redirect to authorization.'''
    # Remember where to send the user once the OAuth dance completes.
    next_page = request.args.get('next')
    if next_page:
        session['after_login'] = next_page

    client_key = app.config['CLIENT_KEY']
    client_secret = app.config['CLIENT_SECRET']
    base_url = 'https://www.wikidata.org/w/index.php'
    request_token_url = base_url + '?title=Special%3aOAuth%2finitiate'

    # 'oob' (out-of-band): MediaWiki uses the callback registered with the
    # consumer rather than one supplied here.
    oauth = OAuth1Session(client_key,
                          client_secret=client_secret,
                          callback_uri='oob')
    fetch_response = oauth.fetch_request_token(request_token_url)

    # Store the temporary request token; exchanged for an access token in
    # oauth_callback().
    session['owner_key'] = fetch_response.get('oauth_token')
    session['owner_secret'] = fetch_response.get('oauth_token_secret')

    base_authorization_url = 'https://www.wikidata.org/wiki/Special:OAuth/authorize'
    authorization_url = oauth.authorization_url(base_authorization_url,
                                                oauth_consumer_key=client_key)
    return redirect(authorization_url)
|
|
|
|
|
|
|
|
@app.route("/oauth/callback", methods=["GET"])
def oauth_callback():
    '''Finish the OAuth handshake: swap the request token for an access token.'''
    base_url = 'https://www.wikidata.org/w/index.php'
    client_key = app.config['CLIENT_KEY']
    client_secret = app.config['CLIENT_SECRET']

    # Session built with the temporary request token saved in start_oauth().
    oauth = OAuth1Session(client_key,
                          client_secret=client_secret,
                          resource_owner_key=session['owner_key'],
                          resource_owner_secret=session['owner_secret'])

    # Extract the verifier MediaWiki appended to the callback URL.
    oauth_response = oauth.parse_authorization_response(request.url)
    verifier = oauth_response.get('oauth_verifier')
    access_token_url = base_url + '?title=Special%3aOAuth%2ftoken'
    oauth = OAuth1Session(client_key,
                          client_secret=client_secret,
                          resource_owner_key=session['owner_key'],
                          resource_owner_secret=session['owner_secret'],
                          verifier=verifier)

    # Replace the request token in the session with the long-lived
    # access token.
    oauth_tokens = oauth.fetch_access_token(access_token_url)
    session['owner_key'] = oauth_tokens.get('oauth_token')
    session['owner_secret'] = oauth_tokens.get('oauth_token_secret')

    # Return to wherever the user was before logging in, if recorded.
    next_page = session.get('after_login')
    return redirect(next_page) if next_page else random_painting()
|
2019-09-27 11:02:24 +01:00
|
|
|
|
2019-09-27 20:19:29 +01:00
|
|
|
@app.route('/oauth/disconnect')
def oauth_disconnect():
    '''Log the user out by dropping all OAuth state from the session.'''
    for session_key in ('owner_key', 'owner_secret', 'username', 'after_login'):
        session.pop(session_key, None)
    return random_painting()
|
|
|
|
|
2019-09-27 16:53:17 +01:00
|
|
|
def create_claim(painting_id, depicts_id, token):
    '''POST a wbcreateclaim API call adding depicts (P180) = Q<depicts_id>
    to painting Q<painting_id>, authenticated via OAuth.

    Returns the requests response object from the API call.
    '''
    claim_value = json.dumps({'entity-type': 'item',
                              'numeric-id': depicts_id})
    call_params = {
        'action': 'wbcreateclaim',
        'entity': f'Q{painting_id}',
        'property': 'P180',
        'snaktype': 'value',
        'value': claim_value,
        'token': token,
        'format': 'json',
        'formatversion': 2,
    }
    return wikidata_oauth.api_post_request(call_params)
|
2019-09-27 11:02:24 +01:00
|
|
|
|
2019-09-25 13:40:15 +01:00
|
|
|
def image_with_cache(qid, image_filename, width):
    '''Return Commons image detail for one file, caching the API reply on disk.

    qid -- painting QID used only to name the cache file
    image_filename -- Commons file name to look up
    width -- requested thumbnail width (also part of the cache key)

    Fix: the original leaked file handles by calling json.load(open(...))
    and json.dump(..., open(..., 'w')); both now use context managers.
    '''
    filename = f'cache/{qid}_{width}_image.json'
    if os.path.exists(filename):
        with open(filename) as f:
            detail = json.load(f)
    else:
        detail = commons.image_detail([image_filename], thumbwidth=width)
        with open(filename, 'w') as f:
            json.dump(detail, f, indent=2)

    return detail[image_filename]
|
|
|
|
|
2019-10-07 14:12:30 +01:00
|
|
|
def existing_depicts_from_entity(entity):
    '''Build display dicts for depicts (P180) statements already on a painting.

    Each entry carries label/description/qid/count plus existing=True so the
    template can distinguish them from new suggestions. Items missing from
    the local database get a 'not in db' placeholder.
    '''
    existing = []
    for claim in entity['claims'].get('P180', []):
        numeric_id = claim['mainsnak']['datavalue']['value']['numeric-id']

        depicts_row = DepictsItem.query.get(numeric_id)
        if depicts_row:
            entry = {
                'label': depicts_row.label,
                'description': depicts_row.description,
                'qid': depicts_row.qid,
                'count': depicts_row.count,
                'existing': True,
            }
        else:
            entry = {
                'label': 'not in db',
                'description': '',
                'qid': f'Q{numeric_id}',
                'count': 0,
                'existing': True,
            }
        existing.append(entry)
    return existing
|
|
|
|
|
2019-10-10 10:58:42 +01:00
|
|
|
def get_institution(entity, other):
    '''Return the label of the painting's institution, if any.

    Prefers location (P276) over collection (P195); *other* maps QIDs to
    labels. Returns None when neither property is present.
    '''
    for prop in ('P276', 'P195'):
        if prop in entity['claims']:
            institution_qid = wikibase.first_datavalue(entity, prop)['id']
            return other[institution_qid]
    return None
|
|
|
|
|
2019-09-12 19:51:05 +01:00
|
|
|
@app.route("/item/Q<int:item_id>")
def item_page(item_id):
    '''Main painting page: image, existing depicts, catalog data and facets.'''
    qid = f'Q{item_id}'
    item = painting.Painting(qid)
    # If we arrived via random_painting() the entity was just refreshed,
    # so skip another refresh; the session flag is single-use (pop).
    from_redirect = qid in session and session.pop(qid) == 'from redirect'
    entity = mediawiki.get_entity_with_cache(qid, refresh=not from_redirect)

    existing_depicts = existing_depicts_from_entity(entity)

    width = 800
    image_filename = item.image_filename
    if image_filename:
        image = image_with_cache(qid, image_filename, width)
    else:
        image = None

    # hits = item.run_query()
    label_and_language = get_entity_label_and_language(entity)
    if label_and_language:
        label = label_and_language['label']
    else:
        label = None
    # Labels for every item referenced by the facet properties.
    other = get_other(item.entity)

    # Possible people mentioned in the title, for depicts suggestions.
    people = human.from_name(label) if label else None

    # Record the painting locally on first visit.
    # NOTE(review): the new row is added but not committed here --
    # presumably flushed by a commit later in the request; confirm.
    painting_item = PaintingItem.query.get(item_id)
    if painting_item is None:
        painting_item = PaintingItem(item_id=item_id, label=label, entity=entity)
        database.session.add(painting_item)

    catalog = wd_catalog.get_catalog_from_painting(entity)
    if not catalog.get('institution'):
        catalog['institution'] = get_institution(entity, other)

    label_languages = label_and_language['languages'] if label_and_language else []
    # Offer translation links only when no label language is English.
    show_translation_links = all(lang.code != 'en' for lang in label_languages)
    return render_template('item.html',
                           qid=qid,
                           item_id=item_id,
                           item=item,
                           catalog=catalog,
                           labels=find_more_props,
                           entity=item.entity,
                           username=wikidata_oauth.get_username(),
                           label=label,
                           label_languages=label_languages,
                           show_translation_links=show_translation_links,
                           existing_depicts=existing_depicts,
                           image=image,
                           people=people,
                           other=other,
                           # hits=hits,
                           title=item.display_title)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-09-30 10:19:12 +01:00
|
|
|
def get_languages(codes):
    '''Query Language rows matching the given Wikimedia language codes.'''
    return Language.query.filter(Language.wikimedia_language_code.in_(codes))
|
|
|
|
|
|
|
|
def get_entity_label_and_language(entity):
    '''Pick a display label for an entity plus the languages that share it.

    Preference order:
      1. the English label, if present;
      2. the single common label, when every language agrees on one value.
    Returns {'label': ..., 'languages': <Language query>} or None when no
    suitable label exists.
    '''
    labels = entity['labels']
    languages_by_label = defaultdict(set)
    for code, label_info in labels.items():
        languages_by_label[label_info['value']].add(code)

    if 'en' in labels:
        english = labels['en']['value']
        return {'label': english,
                'languages': get_languages(languages_by_label[english])}

    if len(languages_by_label) == 1:
        (only_label, codes), = languages_by_label.items()
        return {'label': only_label,
                'languages': get_languages(codes)}

    return None
|
|
|
|
|
2019-09-12 19:51:05 +01:00
|
|
|
def get_labels(keys, name=None):
    '''Return {id: English label} for the given item/property ids.

    keys -- iterable of ids like 'Q42'/'P31'; sorted numerically so the
            cache key is stable regardless of input order
    name -- cache file stem; defaults to the joined key list

    Results are cached in cache/<name>_labels.json; the cache is only
    trusted when it was built for exactly the same key list.

    Fix: the original leaked file handles via json.load(open(...)) and
    json.dump(..., open(..., 'w')); both now use context managers.
    '''
    keys = sorted(keys, key=lambda i: int(i[1:]))
    if name is None:
        name = '_'.join(keys)
    filename = f'cache/{name}_labels.json'
    labels = []
    if os.path.exists(filename):
        with open(filename) as f:
            from_cache = json.load(f)
        if isinstance(from_cache, dict) and from_cache.get('keys') == keys:
            labels = from_cache['labels']
    if not labels:
        # API limit: at most 50 entities per request.
        for cur in utils.chunk(keys, 50):
            labels += mediawiki.get_entities(cur, props='labels')

        with open(filename, 'w') as f:
            json.dump({'keys': keys, 'labels': labels}, f, indent=2)

    return {entity['id']: wikibase.get_entity_label(entity) for entity in labels}
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-10-10 20:52:11 +01:00
|
|
|
def build_other_set(entity):
    '''Collect the QIDs referenced by any of the entity's facet-property
    claims (find_more_props), skipping snaks without a datavalue.'''
    claims = entity['claims']
    return {
        snak['mainsnak']['datavalue']['value']['id']
        for prop in find_more_props
        for snak in claims.get(prop, [])
        if 'datavalue' in snak['mainsnak']
    }
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-10-10 20:52:11 +01:00
|
|
|
def get_other(entity):
    '''Labels for every item referenced by the entity's facet-property claims.'''
    other_items = build_other_set(entity)
    return get_labels(other_items)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-09-29 09:47:55 +01:00
|
|
|
@app.route("/admin/edits")
def list_edits():
    '''Admin view: every edit made through the tool, newest first, with
    distinct painting and user counts.'''
    recent_first = Edit.query.order_by(Edit.timestamp.desc())

    def count_distinct(column):
        # COUNT(DISTINCT column) over the whole edits table.
        return database.session.query(func.count(distinct(column))).scalar()

    return render_template('list_edits.html',
                           edits=Edit.query,
                           edit_list=recent_first,
                           painting_count=count_distinct(Edit.painting_id),
                           user_count=count_distinct(Edit.username))
|
2019-09-29 09:47:55 +01:00
|
|
|
|
2019-09-29 11:52:12 +01:00
|
|
|
@app.route("/user/<username>")
def user_page(username):
    '''Show one user's edits, newest first, plus their distinct painting count.'''
    edits_by_user = (Edit.query.filter_by(username=username)
                     .order_by(Edit.timestamp.desc()))

    distinct_paintings = (database.session
                          .query(func.count(distinct(Edit.painting_id)))
                          .filter_by(username=username)
                          .scalar())

    return render_template('user_page.html',
                           username=username,
                           edits=Edit.query,
                           edit_list=edits_by_user,
                           painting_count=distinct_paintings)
|
|
|
|
|
2019-09-25 13:40:15 +01:00
|
|
|
@app.route("/next/Q<int:item_id>")
def next_page(item_id):
    '''Post-save page: show the painting and "find more" links grouped by
    each facet property's values.'''
    qid = f'Q{item_id}'

    entity = mediawiki.get_entity_with_cache(qid)

    width = 800
    image_filename = wikibase.first_datavalue(entity, 'P18')
    image = image_with_cache(qid, image_filename, width)

    label = wikibase.get_entity_label(entity)
    other = get_other(entity)

    # One entry per facet property that has usable values on this painting.
    other_list = []
    for key, prop_label in find_more_props.items():
        if key == 'P186':  # skip material used
            continue  # too generic
        claims = entity['claims'].get(key)
        if not claims:
            continue

        values = []

        for claim in claims:
            # novalue/somevalue snaks carry no datavalue.
            if 'datavalue' not in claim['mainsnak']:
                continue
            value = claim['mainsnak']['datavalue']['value']
            claim_qid = value['id']
            if claim_qid == 'Q4233718':
                continue  # anonymous artist
            numeric_id = value['numeric-id']
            href = url_for('find_more_page', property_id=key[1:], item_id=numeric_id)
            values.append({
                'href': href,
                'qid': claim_qid,
                'label': other.get(claim_qid),
            })

        if not values:
            continue

        qid_list = [v['qid'] for v in values]

        other_list.append({
            'label': prop_label,
            # Endpoint the page calls to lazily fetch sample thumbnails.
            'image_lookup': url_for('find_more_json', pid=key, qid=qid_list),
            'pid': key,
            'values': values,
            'images': [],
        })

    return render_template('next.html',
                           qid=qid,
                           label=label,
                           image=image,
                           labels=find_more_props,
                           other=other,
                           entity=entity,
                           other_props=other_list)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
|
|
|
@app.route('/P<int:property_id>/Q<int:item_id>')
def find_more_page(property_id, item_id):
    '''Redirect a P/Q pair to the browse page filtered on that value.'''
    filters = {f'P{property_id}': f'Q{item_id}'}
    return redirect(url_for('browse_page', **filters))
|
2019-09-13 17:16:16 +01:00
|
|
|
|
2019-10-09 14:38:10 +01:00
|
|
|
def get_facets(params):
    '''Compute facet values for the browse page.

    params -- list of (pid, qid) filters currently applied.
    Returns {pid: top-15 [{'qid', 'label', 'count'}, ...] by count} for
    every facet property that has values.
    '''
    flat = '_'.join(f'{pid}={qid}' for pid, qid in params)

    # Facet over the properties not already filtered on.
    # NOTE(review): membership is tested against request.args rather than
    # *params* -- equivalent for normal P=Q query strings, but inconsistent
    # with the params argument; confirm before relying on it.
    properties = [pid for pid in find_more_props.keys()
                  if pid not in request.args]

    q = render_template('query/facet.sparql',
                        params=params,
                        properties=properties)

    bindings = wdqs.run_query_with_cache(q, flat + '_facets')

    facets = {key: [] for key in find_more_props.keys()}
    for row in bindings:
        # URIs look like http://www.wikidata.org/entity/Q123 -- keep the id.
        pid = row['property']['value'].rpartition('/')[2]
        qid = row['object']['value'].rpartition('/')[2]
        label = row['objectLabel']['value']
        count = int(row['count']['value'])

        facets[pid].append({'qid': qid, 'label': label, 'count': count})

    # Keep only non-empty facets, top 15 values each by count.
    return {
        key: sorted(values, key=lambda i: i['count'], reverse=True)[:15]
        for key, values in facets.items()
        if values
    }
|
|
|
|
|
2019-10-10 17:44:21 +01:00
|
|
|
def get_painting_params():
    '''Extract (pid, qid) filter pairs like ('P170', 'Q123') from the
    query string, ignoring everything else (e.g. page, sort).'''
    pairs = []
    for key, value in request.args.items():
        if key.startswith('P') and value.startswith('Q'):
            pairs.append((key, value))
    return pairs
|
|
|
|
|
|
|
|
def filter_painting(params):
    '''Run the find_more SPARQL query for the given (pid, qid) filters and
    return the result bindings (cached by the flattened filter string).'''
    cache_key = '_'.join(f'{pid}={qid}' for pid, qid in params)
    sparql = render_template('query/find_more.sparql', params=params)
    return wdqs.run_query_with_cache(sparql, cache_key)
|
|
|
|
|
|
|
|
@app.route('/catalog')
def catalog_page():
    '''Catalog listing of paintings matching the current P=Q filters.'''
    params = get_painting_params()
    bindings = filter_painting(params)
    # NOTE(review): page/page_size feed only the image-cache key below --
    # no pagination is actually applied to the item list; confirm intended.
    page = utils.get_int_arg('page') or 1
    page_size = 45

    item_ids = set()
    for row in bindings:
        item_id = wdqs.row_id(row)
        item_ids.add(item_id)

    qids = [f'Q{item_id}' for item_id in sorted(item_ids)]

    entities = mediawiki.get_entities_with_cache(qids)

    items = []
    other_items = set()
    for entity in entities:
        # Accumulate referenced QIDs across all paintings for one label lookup.
        other_items.update(build_other_set(entity))
        item = {
            'label': wikibase.get_entity_label(entity),
            'qid': entity['id'],
            'item_id': int(entity['id'][1:]),
            'image_filename': wikibase.first_datavalue(entity, 'P18'),
            'entity': entity,
        }
        items.append(item)

    other = get_labels(other_items)

    flat = '_'.join(f'{pid}={qid}' for pid, qid in params)
    thumbwidth = 400
    cache_name = f'{flat}_{page}_{page_size}_{thumbwidth}'
    detail = get_image_detail_with_cache(items, cache_name, thumbwidth=thumbwidth)

    for item in items:
        item['url'] = url_for('item_page', item_id=item['item_id'])
        # NOTE(review): raises KeyError for paintings without a P18 image
        # (image_filename None); confirm all matches have images.
        item['image'] = detail[item['image_filename']]

    item_labels = get_labels(qid for pid, qid in params)
    title = ' / '.join(find_more_props[pid] + ': ' + item_labels[qid]
                       for pid, qid in params)

    return render_template('catalog.html',
                           labels=find_more_props,
                           items=items,
                           other=other,
                           title=title)
|
2019-10-10 17:44:21 +01:00
|
|
|
|
|
|
|
def get_image_detail_with_cache(items, cache_name, thumbwidth=None):
    '''Fetch Commons image details for a page of items, caching the reply.

    items -- dicts each carrying an 'image_filename' key
    cache_name -- stem for the cache file under cache/
    thumbwidth -- thumbnail width; defaults to app.config['THUMBWIDTH']

    Returns the {filename: detail} mapping from commons.image_detail.

    Fix: the original leaked file handles by calling json.load(open(...))
    and json.dump(..., open(..., 'w')); both now use context managers.
    '''
    filenames = [cur['image_filename'] for cur in items]

    if thumbwidth is None:
        thumbwidth = app.config['THUMBWIDTH']

    filename = f'cache/{cache_name}_images.json'
    if os.path.exists(filename):
        with open(filename) as f:
            detail = json.load(f)
    else:
        detail = commons.image_detail(filenames, thumbwidth=thumbwidth)
        with open(filename, 'w') as f:
            json.dump(detail, f, indent=2)

    return detail
|
|
|
|
|
2019-09-13 17:16:16 +01:00
|
|
|
@app.route('/browse')
def browse_page():
    '''Paginated browse view of paintings matching the current P=Q filters,
    with facet counts for further narrowing.'''
    params = get_painting_params()

    # No filters yet: show the facet index instead.
    if not params:
        return render_template('browse_index.html',
                               props=find_more_props,
                               username=wikidata_oauth.get_username())

    flat = '_'.join(f'{pid}={qid}' for pid, qid in params)

    item_labels = get_labels(qid for pid, qid in params)

    bindings = filter_painting(params)

    facets = get_facets(params)

    page_size = 45

    item_map = wdqs.build_browse_item_map(bindings)

    # Keep only paintings with exactly one image, unwrapping the list.
    all_items = []
    for item in item_map.values():
        if len(item['image_filename']) != 1:
            continue
        item['image_filename'] = item['image_filename'][0]
        all_items.append(item)

    page = utils.get_int_arg('page') or 1
    pager = Pagination(page, page_size, len(all_items))

    items = pager.slice(all_items)

    # Thumbnails are only fetched for the current page of items.
    cache_name = f'{flat}_{page}_{page_size}'
    detail = get_image_detail_with_cache(items, cache_name)

    for item in items:
        item['url'] = url_for('item_page', item_id=item['item_id'])
        item['image'] = detail[item['image_filename']]

    title = ' / '.join(find_more_props[pid] + ': ' + item_labels[qid]
                       for pid, qid in params)

    return render_template('find_more.html',
                           facets=facets,
                           prop_labels=find_more_props,
                           label=title,
                           pager=pager,
                           params=params,
                           item_map=item_map,
                           page=page,
                           labels=find_more_props,
                           bindings=bindings,
                           total=len(item_map),
                           items=items)
|
2019-09-12 19:51:05 +01:00
|
|
|
|
2019-09-29 19:00:59 +01:00
|
|
|
@app.route('/find_more.json')
def find_more_json():
    '''JSON endpoint: sample paintings (with thumbnails) matching pid + qid
    filters, used to decorate the "next" page property lists.'''
    pid = request.args.get('pid')
    qid_list = request.args.getlist('qid')
    limit = 6  # small sample, enough for an image strip

    q = render_template('query/find_more_basic.sparql',
                        qid_list=qid_list,
                        pid=pid,
                        limit=limit)

    filenames = []
    bindings = wdqs.run_query_with_cache(q, f'{pid}={",".join(qid_list)}_{limit}')
    items = []
    for row in bindings:
        item_id = wdqs.row_id(row)
        row_qid = f'Q{item_id}'
        image_filename = wdqs.commons_uri_to_filename(row['image']['value'])
        filenames.append(image_filename)
        items.append({'qid': row_qid,
                      'item_id': item_id,
                      'href': url_for('item_page', item_id=item_id),
                      'filename': image_filename})

    # Look up thumbnails for all sampled images in one Commons API call.
    thumbheight = 120
    detail = commons.image_detail(filenames, thumbheight=thumbheight)

    for item in items:
        item['image'] = detail[item['filename']]

    return jsonify(items=items, q=q)
|
|
|
|
|
2019-09-25 13:40:15 +01:00
|
|
|
@app.route('/lookup')
def depicts_lookup():
    '''Typeahead endpoint for the depicts search box.

    Combines three sources: local DepictsItem labels, local alt labels, and
    a live wbsearchentities call for anything not in the local database.
    '''
    terms = request.args.get('terms')
    if not terms:
        return jsonify(error='terms parameter is required')

    terms = terms.strip()
    if len(terms) < 3:
        # Avoid expensive prefix scans on very short input.
        return jsonify(
            count=0,
            hits=[],
            notice='terms too short for lookup',
        )

    item_ids = []
    hits = []
    # Source 1: local items whose label starts with the terms.
    q1 = DepictsItem.query.filter(DepictsItem.label.ilike(terms + '%'))
    seen = set()  # QIDs already returned, to dedupe across sources
    for item in q1:
        hit = {
            'label': item.label,
            'description': item.description,
            'qid': item.qid,
            'count': item.count,
        }
        item_ids.append(item.item_id)
        hits.append(hit)
        seen.add(item.qid)

    # Source 2: alt-label matches, excluding items already found above.
    cls = DepictsItemAltLabel
    q2 = cls.query.filter(cls.alt_label.ilike(terms + '%'),
                          ~cls.item_id.in_(item_ids))

    for alt in q2:
        item = alt.item
        hit = {
            'label': item.label,
            'description': item.description,
            'qid': item.qid,
            'count': item.count,
            'alt_label': alt.alt_label,
        }
        hits.append(hit)
        seen.add(item.qid)

    # Source 3: live Wikidata search for items not in the local database.
    r = mediawiki.api_call({
        'action': 'wbsearchentities',
        'search': terms,
        'limit': 'max',
        'language': 'en'
    })
    # Local hits are ranked by usage count; API results (count 0) are
    # appended afterwards in the API's own order.
    hits.sort(key=lambda hit: hit['count'], reverse=True)

    for result in r.json()['search']:
        if result['id'] in seen:
            continue

        seen.add(result['id'])
        hit = {
            'label': result['label'],
            'description': result.get('description') or None,
            'qid': result['id'],
            'count': 0,
        }
        if result['match']['type'] == 'alias':
            hit['alt_label'] = result['match']['text']
        hits.append(hit)

    ret = {
        'count': q1.count() + q2.count(),
        'hits': hits,
        'terms': terms,
    }

    return jsonify(ret)
|
|
|
|
|
2019-09-12 19:51:05 +01:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    # debug=True passed to run() already enables debug mode, so the
    # separate `app.debug = True` assignment was redundant and removed.
    # WARNING: binding 0.0.0.0 with debug enabled exposes the Werkzeug
    # interactive debugger to the network -- development use only.
    app.run(host='0.0.0.0', debug=True)
|