move remaining files

Bruce Röttgers
2025-09-29 17:51:42 +02:00
parent 183aa1ae66
commit 0bf0238ef1
5 changed files with 21 additions and 116 deletions

src/tests/fixtures/boston_input.txt (vendored, 1577 lines added; diff suppressed because it is too large)

src/tests/fixtures/boston_output.json (vendored, 2905 lines added; diff suppressed because one or more lines are too long)

@@ -0,0 +1,333 @@
import json
import os
import pytest
from transformers import WikivoyageParser

def dump(obj):
    # canonical JSON for deep compare
    return json.dumps(obj, sort_keys=True, separators=(",", ":"))

def wrap(children):
    """Wrap a list of child nodes in the default root envelope."""
    return {
        "type": "root",
        "properties": {},
        "children": children
    }

@pytest.fixture
def parser():
    return WikivoyageParser()

def test_empty_input_is_root_only(parser):
    got = parser.parse("")
    assert dump(got) == dump(wrap([]))

def test_plain_text_node(parser):
    got = parser.parse("Just some plain text.")
    expected = wrap([
        {"type":"text","properties":{"markdown":"Just some plain text."},"children":[]}
    ])
    assert dump(got) == dump(expected)

def test_template_node(parser):
    got = parser.parse("{{foo|a=1|b=two}}")
    expected = wrap([
        {
            "type":"template",
            "properties":{"name":"foo","params":{"a":"1","b":"two"}},
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_see_listing_full_properties(parser):
    snippet = (
        "{{see"
        "|name=Statue"
        "|alt=Monument"
        "|url=http://x"
        "|email=a@b.com"
        "|address=1 Road"
        "|lat=1.23"
        "|long=4.56"
        "|directions=North"
        "|phone=12345"
        "|tollfree=800"
        "|fax=54321"
        "|hours=24/7"
        "|price=Free"
        "|lastedit=2020-01-01"
        "|wikipedia=Statue"
        "|wikidata=Q1"
        "|content=Big statue"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"see",
            "properties":{
                "name":"Statue","alt":"Monument","url":"http://x",
                "email":"a@b.com","address":"1 Road","lat":"1.23","long":"4.56",
                "directions":"North","phone":"12345","tollfree":"800",
                "fax":"54321","hours":"24/7","price":"Free",
                "lastedit":"2020-01-01","wikipedia":"Statue","wikidata":"Q1",
                "content":"Big statue"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_do_listing_full_properties(parser):
    snippet = (
        "{{do"
        "|name=Walk"
        "|alt=Stroll"
        "|url=http://walk"
        "|email=hi@walk"
        "|address=Main Street"
        "|lat=2.34"
        "|long=5.67"
        "|directions=East"
        "|phone=222-333"
        "|tollfree=800-DO-WALK"
        "|fax=999-888"
        "|hours=All day"
        "|price=Free"
        "|lastedit=2021-02-02"
        "|wikipedia=Walking"
        "|wikidata=Q2"
        "|content=Enjoy a walk"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"do",
            "properties":{
                "name":"Walk","alt":"Stroll","url":"http://walk",
                "email":"hi@walk","address":"Main Street","lat":"2.34","long":"5.67",
                "directions":"East","phone":"222-333","tollfree":"800-DO-WALK",
                "fax":"999-888","hours":"All day","price":"Free",
                "lastedit":"2021-02-02","wikipedia":"Walking","wikidata":"Q2",
                "content":"Enjoy a walk"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_buy_listing_full_properties(parser):
    snippet = (
        "{{buy"
        "|name=Shirt"
        "|alt=Tees"
        "|url=http://shop"
        "|email=sales@shop"
        "|address=Market St"
        "|lat=3.45"
        "|long=6.78"
        "|directions=West"
        "|phone=444-555"
        "|tollfree=800-BUY-TEE"
        "|fax=777-666"
        "|hours=96"
        "|price=$20"
        "|lastedit=2022-03-03"
        "|wikipedia=Shopping"
        "|wikidata=Q3"
        "|content=Quality tees"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"buy",
            "properties":{
                "name":"Shirt","alt":"Tees","url":"http://shop",
                "email":"sales@shop","address":"Market St","lat":"3.45","long":"6.78",
                "directions":"West","phone":"444-555","tollfree":"800-BUY-TEE",
                "fax":"777-666","hours":"96","price":"$20",
                "lastedit":"2022-03-03","wikipedia":"Shopping","wikidata":"Q3",
                "content":"Quality tees"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_eat_listing_full_properties(parser):
    snippet = (
        "{{eat"
        "|name=Diner"
        "|alt=Cafe"
        "|url=http://eat"
        "|email=food@eat"
        "|address=Food Lane"
        "|lat=4.56"
        "|long=7.89"
        "|directions=South"
        "|phone=666-777"
        "|tollfree=800-EAT-YUM"
        "|fax=555-444"
        "|hours=Breakfast"
        "|price=$10–$30"
        "|lastedit=2023-04-04"
        "|wikipedia=Dining"
        "|wikidata=Q4"
        "|content=Best pancakes"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"eat",
            "properties":{
                "name":"Diner","alt":"Cafe","url":"http://eat",
                "email":"food@eat","address":"Food Lane","lat":"4.56","long":"7.89",
                "directions":"South","phone":"666-777","tollfree":"800-EAT-YUM",
                "fax":"555-444","hours":"Breakfast","price":"$10–$30",
                "lastedit":"2023-04-04","wikipedia":"Dining","wikidata":"Q4",
                "content":"Best pancakes"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_drink_listing_full_properties(parser):
    snippet = (
        "{{drink"
        "|name=Pub"
        "|alt=Bar"
        "|url=http://drink"
        "|email=cheers@drink"
        "|address=Bar Street"
        "|lat=5.67"
        "|long=8.90"
        "|directions=Center"
        "|phone=888-999"
        "|tollfree=800-DRINK"
        "|fax=333-222"
        "|hours=Evening"
        "|price=$7–$30"
        "|lastedit=2024-05-05"
        "|wikipedia=Nightlife"
        "|wikidata=Q5"
        "|content=Great brews"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"drink",
            "properties":{
                "name":"Pub","alt":"Bar","url":"http://drink",
                "email":"cheers@drink","address":"Bar Street","lat":"5.67","long":"8.90",
                "directions":"Center","phone":"888-999","tollfree":"800-DRINK",
                "fax":"333-222","hours":"Evening","price":"$7–$30",
                "lastedit":"2024-05-05","wikipedia":"Nightlife","wikidata":"Q5",
                "content":"Great brews"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_sleep_listing_full_properties(parser):
    snippet = (
        "{{sleep"
        "|name=Hotel"
        "|alt=Inn"
        "|url=http://sleep"
        "|email=stay@sleep"
        "|address=Sleepy Ave"
        "|lat=6.78"
        "|long=9.01"
        "|directions=Uptown"
        "|phone=000-111"
        "|tollfree=800-SLEEP"
        "|fax=111-000"
        "|hours=24h"
        "|price=$100"
        "|lastedit=2025-06-06"
        "|wikipedia=Accommodation"
        "|wikidata=Q6"
        "|checkin=3PM"
        "|checkout=11AM"
        "|content=Cozy rooms"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"sleep",
            "properties":{
                "name":"Hotel","alt":"Inn","url":"http://sleep",
                "email":"stay@sleep","address":"Sleepy Ave","lat":"6.78","long":"9.01",
                "directions":"Uptown","phone":"000-111","tollfree":"800-SLEEP",
                "fax":"111-000","hours":"24h","price":"$100",
                "lastedit":"2025-06-06","wikipedia":"Accommodation","wikidata":"Q6",
                "checkin":"3PM","checkout":"11AM","content":"Cozy rooms"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_generic_listing_full_properties(parser):
    snippet = (
        "{{listing"
        "|name=Info"
        "|alt=Data"
        "|url=http://info"
        "|email=info@info"
        "|address=Down St"
        "|lat=7.89"
        "|long=0.12"
        "|directions=Here"
        "|phone=123-000"
        "|tollfree=800-INFO"
        "|fax=000-123"
        "|hours=All times"
        "|price=$0"
        "|lastedit=2026-07-07"
        "|wikipedia=InfoPage"
        "|wikidata=Q7"
        "|content=Useful info"
        "}}"
    )
    got = parser.parse(snippet)
    expected = wrap([
        {
            "type":"listing",
            "properties":{
                "name":"Info","alt":"Data","url":"http://info",
                "email":"info@info","address":"Down St","lat":"7.89","long":"0.12",
                "directions":"Here","phone":"123-000","tollfree":"800-INFO",
                "fax":"000-123","hours":"All times","price":"$0",
                "lastedit":"2026-07-07","wikipedia":"InfoPage","wikidata":"Q7",
                "content":"Useful info"
            },
            "children":[]
        }
    ])
    assert dump(got) == dump(expected)

def test_section_and_subsection(parser):
    got = parser.parse("Intro\n== First ==\nHello\n=== Sub ===\nWorld")
    sec = got["children"][1]
    assert sec["type"] == "section" and sec["properties"]["level"] == 2
    sub = sec["children"][1]
    assert sub["type"] == "section" and sub["properties"]["level"] == 3

def test_full_boston_snapshot(parser):
    here = os.path.dirname(__file__)
    inp = os.path.join(here, "fixtures", "boston_input.txt")
    out = os.path.join(here, "fixtures", "boston_output.json")
    wikicode = open(inp, encoding="utf-8").read()
    expected = json.load(open(out, encoding="utf-8"))
    got = parser.parse(wikicode)
    assert dump(got) == dump(expected)
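
For reference, a minimal sketch of how the Boston snapshot fixtures could presumably be regenerated by running the parser over the stored input. The repository-root working directory and the transformers import path (copied from the test module above) are assumptions, not part of this commit:

# Hypothetical regeneration script for the Boston snapshot fixtures.
# Assumes it is run from the repository root; adjust paths otherwise.
import json

from transformers import WikivoyageParser

with open("src/tests/fixtures/boston_input.txt", encoding="utf-8") as f:
    wikicode = f.read()

entry = WikivoyageParser().parse(wikicode)

# The test compares canonical dumps (sort_keys), so the exact layout here
# only needs to round-trip through json.load.
with open("src/tests/fixtures/boston_output.json", "w", encoding="utf-8") as f:
    json.dump(entry, f, sort_keys=True, separators=(",", ":"))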

@@ -5,20 +5,19 @@ from .parser import WikivoyageParser
 logger = getLogger(__name__)
 
 class WikiDumpHandler(xml.sax.ContentHandler):
     """
     SAX handler that, for each <page> whose <id> is in mappings,
     collects the <text> and schedules an async task to parse
-    and write via the user-supplied handler.
+    and write via the user-supplied handler(s).
     """
 
-    def __init__(self, mappings, handler, max_concurrent):
+    def __init__(self, mappings, handlers):
         super().__init__()
         self.mappings = mappings
-        self.handler = handler
-        self.sem = (
-            asyncio.Semaphore(max_concurrent) if max_concurrent > 0 else None
-        )
+        # Support a single handler or a list of handlers
+        self.handlers = handlers
         self.tasks: list[asyncio.Task] = []
         self.currentTag: str | None = None
@@ -26,6 +25,7 @@ class WikiDumpHandler(xml.sax.ContentHandler):
         self.inRevision = False
         self.inText = False
         self.currentPageId: str | None = None
+        self.currentTitle: str | None = None
         self.currentText: list[str] = []
 
     def startElement(self, name, attrs):
@@ -34,6 +34,7 @@ class WikiDumpHandler(xml.sax.ContentHandler):
             logger.debug("start page")
             self.inPage = True
             self.currentPageId = None
+            self.currentTitle = None
             self.currentText = []
         elif name == "revision":
             logger.debug("start revision")
@@ -49,18 +50,17 @@ class WikiDumpHandler(xml.sax.ContentHandler):
             if pid and pid in self.mappings:
                 wd_id = self.mappings[pid]
                 text = "".join(self.currentText)
+                title = self.currentTitle
                 logger.debug(f"scheduled {wd_id} for handling")
                 # schedule processing
-                if self.sem:
-                    task = asyncio.create_task(self._bounded_process(text, wd_id))
-                else:
-                    task = asyncio.create_task(self._process(text, wd_id))
+                task = asyncio.create_task(self._process(text, wd_id, title))
                 self.tasks.append(task)
             else:
                 logger.debug(f"page {pid} without wikidata id, skipping...")
             # reset
             self.inPage = self.inRevision = self.inText = False
             self.currentPageId = None
+            self.currentTitle = None
             self.currentText = []
         elif name == "revision":
             logger.debug("end revision")
@@ -81,16 +81,21 @@ class WikiDumpHandler(xml.sax.ContentHandler):
             content_stripped = content.strip()
             if content_stripped:  # Only process non-empty ID content
                 self.currentPageId = content_stripped
+        elif self.currentTag == "title" and self.inPage:
+            if self.currentTitle is None:
+                self.currentTitle = content
+            else:
+                self.currentTitle += content
         elif self.inText:
             # Always append text content, even if it's just whitespace or newlines
             self.currentText.append(content)
 
-    async def _process(self, text: str, uid: str):
+    async def _process(self, text: str, uid: str, title: str):
         parser = WikivoyageParser()
         entry = parser.parse(text)
-        await self.handler.write_entry(entry, uid)
+        entry["properties"]["title"] = title
-
-    async def _bounded_process(self, text: str, uid: str):
-        # Only run N at once
-        async with self.sem:
-            await self._process(text, uid)
+        # Write to all handlers concurrently
+        await asyncio.gather(
+            *[handler.write_entry(entry, uid) for handler in self.handlers]
+        )
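
With these changes the handler tags each parsed entry with its page title and fans the write out to every configured handler; it only schedules tasks, so the caller is expected to await them after the SAX pass. A minimal driver sketch under those assumptions; the import path, PrintHandler, dump.xml, and the mapping values are illustrative, not part of this diff:

# Hypothetical driver for WikiDumpHandler. PrintHandler, dump.xml, and the
# mapping values are made up for illustration.
import asyncio
import xml.sax

# from ... import WikiDumpHandler  # module path depends on the project layout

class PrintHandler:
    # Stand-in output handler: anything exposing an async write_entry(entry, uid).
    async def write_entry(self, entry, uid):
        print(uid, entry["properties"].get("title"))

async def main():
    mappings = {"12345": "Q42"}  # page id -> wikidata id (made-up values)
    sax_handler = WikiDumpHandler(mappings, [PrintHandler()])
    # xml.sax.parse is synchronous; running it inside the event loop lets the
    # handler's asyncio.create_task() calls find a running loop. The scheduled
    # tasks only execute once control returns to the loop at the gather below.
    xml.sax.parse("dump.xml", sax_handler)
    await asyncio.gather(*sax_handler.tasks)

asyncio.run(main())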