---
avg_processing_time_seconds: 25.811143970489503
completed_at: '2025-12-05T09:19:10.030121+00:00'
errors:
- entry_id: 0003/drentsmuseum.nl
  errors:
  - 'Processing failed: expected string or bytes-like object, got ''UncertaintyDetector'''
  - "Traceback (most recent call last):\n File \"/Users/kempersc/apps/glam/scripts/batch_extract_web_annotations.py\"\
    , line 244, in process_entry\n confidence = get_confidence_score(text, uncertainty_detector)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 733, in get_confidence_score\n return detector.get_claim_confidence(claim,\
    \ context)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 616, in get_claim_confidence\n context_analysis = self.analyze(context)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^\n File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 564, in analyze\n hedges = self.detect_hedges(text)\n ^^^^^^^^^^^^^^^^^^^^^^^^\n\
    \ File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 476, in detect_hedges\n for match in pattern.finditer(text):\n \
    \ ^^^^^^^^^^^^^^^^^^^^^^\nTypeError: expected string or bytes-like\
    \ object, got 'UncertaintyDetector'\n"
- entry_id: 0004/dmdebuitenplaats.nl
  errors:
  - 'Processing failed: expected string or bytes-like object, got ''UncertaintyDetector'''
  - "Traceback (most recent call last):\n File \"/Users/kempersc/apps/glam/scripts/batch_extract_web_annotations.py\"\
    , line 244, in process_entry\n confidence = get_confidence_score(text, uncertainty_detector)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 733, in get_confidence_score\n return detector.get_claim_confidence(claim,\
    \ context)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 616, in get_claim_confidence\n context_analysis = self.analyze(context)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^\n File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 564, in analyze\n hedges = self.detect_hedges(text)\n ^^^^^^^^^^^^^^^^^^^^^^^^\n\
    \ File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 476, in detect_hedges\n for match in pattern.finditer(text):\n \
    \ ^^^^^^^^^^^^^^^^^^^^^^\nTypeError: expected string or bytes-like\
    \ object, got 'UncertaintyDetector'\n"
- entry_id: 0005/aaenhunze.nl
  errors:
  - 'Processing failed: expected string or bytes-like object, got ''UncertaintyDetector'''
  - "Traceback (most recent call last):\n File \"/Users/kempersc/apps/glam/scripts/batch_extract_web_annotations.py\"\
    , line 244, in process_entry\n confidence = get_confidence_score(text, uncertainty_detector)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 733, in get_confidence_score\n return detector.get_claim_confidence(claim,\
    \ context)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 616, in get_claim_confidence\n context_analysis = self.analyze(context)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^\n File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 564, in analyze\n hedges = self.detect_hedges(text)\n ^^^^^^^^^^^^^^^^^^^^^^^^\n\
    \ File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 476, in detect_hedges\n for match in pattern.finditer(text):\n \
    \ ^^^^^^^^^^^^^^^^^^^^^^\nTypeError: expected string or bytes-like\
    \ object, got 'UncertaintyDetector'\n"
- entry_id: 0006/borger-odoorn.nl
  errors:
  - 'Processing failed: expected string or bytes-like object, got ''UncertaintyDetector'''
  - "Traceback (most recent call last):\n File \"/Users/kempersc/apps/glam/scripts/batch_extract_web_annotations.py\"\
    , line 244, in process_entry\n confidence = get_confidence_score(text, uncertainty_detector)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 733, in get_confidence_score\n return detector.get_claim_confidence(claim,\
    \ context)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\
    /Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\", line\
    \ 616, in get_claim_confidence\n context_analysis = self.analyze(context)\n\
    \ ^^^^^^^^^^^^^^^^^^^^^\n File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 564, in analyze\n hedges = self.detect_hedges(text)\n ^^^^^^^^^^^^^^^^^^^^^^^^\n\
    \ File \"/Users/kempersc/apps/glam/src/glam_extractor/annotators/uncertainty.py\"\
    , line 476, in detect_hedges\n for match in pattern.finditer(text):\n \
    \ ^^^^^^^^^^^^^^^^^^^^^^\nTypeError: expected string or bytes-like\
    \ object, got 'UncertaintyDetector'\n"
failed_entries: 4
processed_entries: 5
skipped_entries: 0
started_at: '2025-12-05T09:16:58.459325+00:00'
successful_entries: 1
total_claims: 0
total_entities: 0
total_entries: 5
total_layout_regions: 0
total_relationships: 0