Black
katjabercic committed Nov 2, 2023
1 parent 04bac47 commit d1cebdc
Showing 5 changed files with 50 additions and 33 deletions.
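The changes below are consistent with running the Black code formatter over the project. The exact invocation and any configuration are not recorded in the commit itself, so the following is only an assumed way to reproduce or verify the formatting:

    black web/            # rewrite files in place
    black --check web/    # report files that would be reformatted, without changing them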
6 changes: 1 addition & 5 deletions web/concepts/models.py
@@ -21,11 +21,7 @@ class Meta:
         unique_together = ["source", "identifier"]

     def get_link(self):
-        return {
-            "name": self.name,
-            "source": self.source,
-            "url": self.url
-        }
+        return {"name": self.name, "source": self.source, "url": self.url}

     def get_links(self):
         return [linked_item.get_link() for linked_item in self.links.all()]
3 changes: 2 additions & 1 deletion web/concepts/views.py
@@ -10,10 +10,11 @@ def concept(request, source, item_id):
             "name": item.name,
             "description": item.description,
             "url": item.url,
-            "links": item.get_links()
+            "links": item.get_links(),
         }
     }
     return render(request, "detail.html", context)

+
 def home(request):
     return render(request, "index.html")
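The concept view takes source and item_id from the URL. The matching concepts/urls.py is not part of this commit, so the pattern below is only a hypothetical sketch of how the view is likely wired up:

    # Hypothetical concepts/urls.py, not shown in this commit.
    from django.urls import path

    from . import views

    urlpatterns = [
        path("<str:source>/<str:item_id>/", views.concept, name="concept"),
    ]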
68 changes: 44 additions & 24 deletions web/slurper/source_wikidata.py
@@ -7,7 +7,6 @@


 class WikidataSlurper:
-
     SPARQL_URL = "https://query.wikidata.org/sparql"

     def __init__(self, source, query, id_map, url_map, name_map, desc_map):
@@ -25,14 +24,14 @@ def fetch_json(self):
             params={"format": "json", "query": self.query},
         )
         return response.json()["results"]["bindings"]

     def json_to_item(self, item) -> Optional[Item]:
         return Item(
             source=self.source,
             identifier=self.id_map(item),
             url=self.url_map(item),
             name=self.name_map(item),
-            description=self.desc_map(item)
+            description=self.desc_map(item),
         )

     def get_items(self):
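For reference, fetch_json returns the bindings list from the standard SPARQL JSON results format, so each item handed to the id_map/url_map/name_map/desc_map callables is a dict of variable bindings. A sketch of one binding, with illustrative values only (which variables are present depends on the query):

    # Illustrative shape of a single result binding from the Wikidata SPARQL endpoint.
    {
        "item": {"type": "uri", "value": "http://www.wikidata.org/entity/Q11518"},
        "itemLabel": {"type": "literal", "xml:lang": "en", "value": "Pythagorean theorem"},
        "itemDescription": {"type": "literal", "xml:lang": "en", "value": "relation in Euclidean geometry"},
    }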
@@ -48,34 +47,51 @@ def save_items(self):

     def save_links(self):
         for json_item in self.raw_data:
-            currentItem = Item.objects.get(source=self.source, identifier=self.id_map(json_item))
+            currentItem = Item.objects.get(
+                source=self.source, identifier=self.id_map(json_item)
+            )
             if self.source == Item.Source.WIKIDATA:
                 # nLab, MathWorld
                 if "nlabID" in json_item:
                     nlab_id = json_item["nlabID"]["value"]
                     try:
-                        linkToItem = Item.objects.get(source=Item.Source.NLAB, identifier=nlab_id)
+                        linkToItem = Item.objects.get(
+                            source=Item.Source.NLAB, identifier=nlab_id
+                        )
                         currentItem.links.add(linkToItem)
                     except:
-                        logging.log(logging.WARNING, f" NLab item {nlab_id} does not exist in the database.")
+                        logging.log(
+                            logging.WARNING,
+                            f" NLab item {nlab_id} does not exist in the database.",
+                        )
                 elif "mwID" in json_item:
                     mw_id = json_item["mwID"]["value"]
                     try:
-                        linkToItem = Item.objects.get(source=Item.Source.MATHWORLD, identifier=mw_id)
+                        linkToItem = Item.objects.get(
+                            source=Item.Source.MATHWORLD, identifier=mw_id
+                        )
                         currentItem.links.add(linkToItem)
                     except:
-                        logging.log(logging.WARNING, f" MathWorld item {mw_id} does not exist in the database.")
-            else: # link back to WD items
+                        logging.log(
+                            logging.WARNING,
+                            f" MathWorld item {mw_id} does not exist in the database.",
+                        )
+            else:  # link back to WD items
                 wd_id = json_item["item"]["value"].split("/")[-1]
                 try:
-                    linkToItem = Item.objects.get(source=Item.Source.WIKIDATA, identifier=wd_id)
+                    linkToItem = Item.objects.get(
+                        source=Item.Source.WIKIDATA, identifier=wd_id
+                    )
                     currentItem.links.add(linkToItem)
                 except:
-                    logging.log(logging.WARNING, f" Wikidata item {wd_id} does not exist in the database.")
+                    logging.log(
+                        logging.WARNING,
+                        f" Wikidata item {wd_id} does not exist in the database.",
+                    )


 WD_SLURPER = WikidataSlurper(
-    Item.Source.WIKIDATA,
+    Item.Source.WIKIDATA,
     """
 SELECT
   DISTINCT ?item ?itemLabel ?itemDescription ?image
@@ -109,15 +125,17 @@ def save_links(self):
 # collect the label and description
 SERVICE wikibase:label { bd:serviceParam wikibase:language "en". }
 }
-    """,
+    """,
     id_map=lambda item: item["item"]["value"].split("/")[-1],
     url_map=lambda item: item["item"]["value"],
     name_map=lambda item: item["itemLabel"]["value"] if ("itemLabel" in item) else None,
-    desc_map=lambda item: item["itemDescription"]["value"] if ("itemDescription" in item) else None
-)
+    desc_map=lambda item: item["itemDescription"]["value"]
+    if ("itemDescription" in item)
+    else None,
+)

 WD_NLAB_SLURPER = WikidataSlurper(
-    Item.Source.NLAB,
+    Item.Source.NLAB,
     """
 SELECT
   DISTINCT ?item ?nlabID
@@ -129,15 +147,15 @@ def save_links(self):
 # collect the label and description
 SERVICE wikibase:label { bd:serviceParam wikibase:language "en". }
 }
-    """,
+    """,
     id_map=lambda item: item["nlabID"]["value"],
     url_map=lambda item: "https://ncatlab.org/nlab/show/" + item["nlabID"]["value"],
     name_map=lambda item: item["nlabID"]["value"],
-    desc_map=lambda _: None
-)
+    desc_map=lambda _: None,
+)

 WD_MATHWORLD_SLURPER = WikidataSlurper(
-    Item.Source.MATHWORLD,
+    Item.Source.MATHWORLD,
     """
 SELECT
   DISTINCT ?item ?mwID
@@ -149,9 +167,11 @@ def save_links(self):
 # collect the label and description
 SERVICE wikibase:label { bd:serviceParam wikibase:language "en". }
 }
-    """,
+    """,
     id_map=lambda item: item["mwID"]["value"],
-    url_map=lambda item: "https://mathworld.wolfram.com/" + item["mwID"]["value"] + ".html",
+    url_map=lambda item: "https://mathworld.wolfram.com/"
+    + item["mwID"]["value"]
+    + ".html",
     name_map=lambda item: item["mwID"]["value"],
-    desc_map=lambda _: None
-)
+    desc_map=lambda _: None,
+)
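A minimal sketch of how these module-level slurpers might be driven, assuming a Django management command or shell session (the actual entry point is not part of this diff). Items are saved for every source before any links are created, since save_links logs a warning and skips the link whenever the target item is not in the database yet:

    # Hypothetical driver; assumes Django is already set up (e.g. inside a management command).
    from slurper.source_wikidata import WD_SLURPER, WD_NLAB_SLURPER, WD_MATHWORLD_SLURPER

    slurpers = (WD_NLAB_SLURPER, WD_MATHWORLD_SLURPER, WD_SLURPER)

    for slurper in slurpers:
        slurper.save_items()   # create the Item rows for each source

    for slurper in slurpers:
        slurper.save_links()   # cross-link items once every source has been saved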
4 changes: 2 additions & 2 deletions web/web/settings.py
@@ -23,9 +23,9 @@
 SECRET_KEY = "django-insecure-9wy9w#vf^tde0262doyy_j19=64c()_qub!1)f+fh-b^=7ndw*"

 # SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = True
+DEBUG = False

-ALLOWED_HOSTS = []
+ALLOWED_HOSTS = ["0.0.0.0"]


 # Application definition
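Unlike the rest of the commit, this hunk changes behaviour rather than formatting: debug mode is switched off and "0.0.0.0" is allowed as a host (the address typically used when the app is served from a container). A common alternative, shown only as a sketch and not something this repository does, is to drive both settings from the environment:

    import os

    DEBUG = os.environ.get("DJANGO_DEBUG", "") == "1"
    ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", "0.0.0.0").split(",")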
2 changes: 1 addition & 1 deletion web/web/urls.py
@@ -19,7 +19,7 @@
 from concepts import views

 urlpatterns = [
-    path('', views.home),
+    path("", views.home),
     path("concept/", include("concepts.urls")),
     path("admin/", admin.site.urls),
 ]
