- Introduced `test_nlp_extractor.py` with unit tests for the InstitutionExtractor, covering various extraction patterns (ISIL, Wikidata, VIAF, city names) and ensuring proper classification of institutions (museum, library, archive). - Added tests for extracted entities and result handling to validate the extraction process. - Created `test_partnership_rdf_integration.py` to validate the end-to-end process of extracting partnerships from a conversation and exporting them to RDF format. - Implemented tests for temporal properties in partnerships and ensured compliance with W3C Organization Ontology patterns. - Verified that extracted partnerships are correctly linked with PROV-O provenance metadata.
157 lines · 5.9 KiB · Python
#!/usr/bin/env python3
|
|
"""
|
|
Chilean GLAM Wikidata Enrichment - Batch 16 (Web Search Verification)
|
|
|
|
Apply manually verified Wikidata identifiers found via web search:
|
|
- USACH's Archivo Patrimonial → Q1259453 (University of Santiago de Chile)
|
|
- Arzobispado's Archivo Histórico → Q175832 (Roman Catholic Archdiocese of Santiago)
|
|
|
|
Target: 65/90 institutions (72.2% coverage) ✓
|
|
Previous: 63/90 (70.0%)
|
|
|
|
Manual verification performed via exa_web_search after Wikidata API rate limits.
|
|
Both matches verified via Wikipedia and institutional websites.
|
|
"""
|
|
|
|
import sys
|
|
from pathlib import Path
|
|
from datetime import datetime, timezone
|
|
import yaml
|
|
|
|
|
|
def load_institutions(filepath: Path) -> list[dict]:
    """Parse the YAML file at *filepath* into a list of institution dicts."""
    with open(filepath, encoding='utf-8') as stream:
        return yaml.safe_load(stream)
|
|
|
|
|
|
def save_institutions(institutions: list[dict], filepath: Path):
    """Serialize *institutions* as YAML to *filepath* (UTF-8)."""
    with open(filepath, 'w', encoding='utf-8') as stream:
        yaml.dump(
            institutions,
            stream,
            allow_unicode=True,  # keep accented Spanish names readable, not \u-escaped
            sort_keys=False,     # preserve the original key order of each record
            width=120,
        )
|
|
|
|
|
|
def enrich_batch16(institutions: list[dict]) -> tuple[list[dict], int]:
    """
    Apply Batch 16 Wikidata enrichments (web search verification).

    Mutates matching records in place: appends a Wikidata identifier and
    stamps provenance metadata. Records that already carry a Wikidata
    identifier are left untouched.

    Returns:
        Tuple of (enriched_institutions, count_enriched)
    """
    stamp = datetime.now(timezone.utc).isoformat()
    total_enriched = 0

    # Batch 16: web-search-verified matches as (name fragment, QID, note).
    # Q1259453 - University of Santiago de Chile (parent org for USACH archive)
    # Q175832  - Roman Catholic Archdiocese of Santiago (parent org for Arzobispado archive)
    verified_matches = [
        ("USACH's Archivo Patrimonial", "Q1259453",
         "University of Santiago de Chile (USACH), public research university founded 1849 (as Escuela de Artes y Oficios)"),
        ("Arzobispado's Archivo Histórico", "Q175832",
         "Roman Catholic Archdiocese of Santiago, Catholic archdiocese in Chile established 1840"),
    ]

    for record in institutions:
        record_name = record.get('name', '')

        # Skip records already enriched with a Wikidata identifier.
        already_linked = any(
            entry.get('identifier_scheme') == 'Wikidata'
            for entry in record.get('identifiers', [])
        )
        if already_linked:
            continue

        # First matching fragment wins; at most one enrichment per record.
        for fragment, qid, note in verified_matches:
            if fragment.lower() not in record_name.lower():
                continue

            record.setdefault('identifiers', []).append({
                'identifier_scheme': 'Wikidata',
                'identifier_value': qid,
                'identifier_url': f'https://www.wikidata.org/wiki/{qid}'
            })

            provenance = record.setdefault('provenance', {})
            provenance['data_tier'] = 'TIER_3_CROWD_SOURCED'
            provenance['last_updated'] = stamp
            provenance['enrichment_batch'] = 16
            provenance['wikidata_match_confidence'] = 'HIGH'
            provenance['wikidata_match_reason'] = 'Manual verification via web search - parent organization identified'
            provenance['wikidata_name'] = note

            total_enriched += 1
            print(f"✓ Enriched: {record_name} → {qid}")
            print(f"  Wikidata: {note}")
            break

    return institutions, total_enriched
|
|
|
|
|
|
def _wikidata_count(institutions: list[dict]) -> int:
    """Return how many institutions carry at least one Wikidata identifier."""
    return sum(
        1 for inst in institutions
        if any(id_item.get('identifier_scheme') == 'Wikidata'
               for id_item in inst.get('identifiers', []))
    )


def main():
    """Main enrichment workflow.

    Loads the Batch 15 YAML, applies the Batch 16 web-search-verified
    matches, prints a before/after coverage report, and saves the result.
    """
    # Paths (fixed for this step of the batch pipeline)
    input_file = Path("data/instances/chile/chilean_institutions_batch15_enriched.yaml")
    output_file = Path("data/instances/chile/chilean_institutions_batch16_enriched.yaml")

    print("=" * 80)
    print("Chilean GLAM Wikidata Enrichment - Batch 16")
    print("Web Search Verification Results")
    print("=" * 80)
    print()

    # Load data
    print(f"Loading: {input_file}")
    institutions = load_institutions(input_file)
    total = len(institutions)
    print(f"  Total institutions: {total}")

    if not institutions:
        # Guard: every percentage below divides by the institution count.
        print("No institutions loaded; nothing to enrich.")
        return

    # Coverage before enrichment (was duplicated inline; now via helper)
    current_with_wikidata = _wikidata_count(institutions)
    print(f"  Current Wikidata coverage: {current_with_wikidata}/{total} ({current_with_wikidata/total*100:.1f}%)")
    print()

    # Apply Batch 16 enrichments
    print("Applying Batch 16 enrichments...")
    institutions, enriched_count = enrich_batch16(institutions)
    print()

    # Coverage after enrichment
    new_with_wikidata = _wikidata_count(institutions)

    print("=" * 80)
    print("Enrichment Summary")
    print("=" * 80)
    print(f"Institutions enriched: {enriched_count}")
    print(f"New Wikidata coverage: {new_with_wikidata}/{total} ({new_with_wikidata/total*100:.1f}%)")
    print(f"Coverage change: +{new_with_wikidata - current_with_wikidata} ({(new_with_wikidata - current_with_wikidata)/total*100:.1f}%)")
    print()

    # Save enriched data
    print(f"Saving: {output_file}")
    save_institutions(institutions, output_file)
    print("✓ Done!")
    print()

    # Progress report against the 70% coverage target.
    # previous_count = coverage after Batch 15 (63/90 = 70.0%); batch-specific.
    previous_count = 63
    print(f"🎉 Progress: {new_with_wikidata}/{total} ({new_with_wikidata/total*100:.1f}%)")
    print(f"   Previous: 70.0% → Current: {new_with_wikidata/total*100:.1f}%")
    print(f"   Exceeded 70% target by {new_with_wikidata - previous_count} institution(s)!")
|
|
|
|
|
|
# Script entry point: run the Batch 16 enrichment when executed directly.
if __name__ == '__main__':
    main()
|