David Dalrymple
david-dalrymple · person · Path: /knowledge-base/people/david-dalrymple/
E1998 — Entity ID (EID)
Page Record — database.json (merged from MDX frontmatter + Entity YAML + computed metrics at build time)
{
"id": "david-dalrymple",
"wikiId": null,
"path": "/knowledge-base/people/david-dalrymple/",
"filePath": "knowledge-base/people/david-dalrymple.mdx",
"title": "David Dalrymple",
"quality": 61,
"readerImportance": 32.5,
"researchImportance": null,
"tacticalValue": 55,
"contentFormat": "article",
"causalLevel": null,
"lastUpdated": "2026-03-22",
"dateCreated": "2026-03-22",
"summary": "A comprehensive biographical profile of David Dalrymple covering his role directing ARIA's £59M Safeguarded AI programme, his technical approach to formal verification for safety-critical AI, and his views on AI timelines. Content is well-organized and concrete but has significant citation gaps—footnotes are vague research notes rather than verifiable primary sources, and several key biographical and economic claims lack proper sourcing.",
"description": "AI safety researcher and Programme Director at ARIA, leading a £59M Safeguarded AI programme focused on formal verification and mathematical guarantees for safe AI deployment.",
"ratings": {
"focus": 8.5,
"novelty": 2.5,
"rigor": 3.5,
"completeness": 7,
"concreteness": 6.5,
"actionability": 2,
"objectivity": 5.5
},
"category": "people",
"subcategory": "safety-researchers",
"clusters": [
"ai-safety"
],
"metrics": {
"wordCount": 1883,
"tableCount": 2,
"diagramCount": 0,
"internalLinks": 26,
"externalLinks": 2,
"footnoteCount": 8,
"bulletRatio": 0.07,
"sectionCount": 12,
"hasOverview": true,
"structuralScore": 14
},
"suggestedQuality": 93,
"updateFrequency": null,
"evergreen": true,
"wordCount": 1883,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 2,
"hallucinationRisk": {
"level": "medium",
"score": 60,
"factors": [
"biographical-claims",
"low-rigor-score",
"moderately-cited"
]
},
"entityType": "person",
"redundancy": {
"maxSimilarity": 13,
"similarPages": [
{
"id": "self-improvement",
"title": "Self-Improvement and Recursive Enhancement",
"path": "/knowledge-base/capabilities/self-improvement/",
"similarity": 13
},
{
"id": "miri-era",
"title": "The MIRI Era (2000-2015)",
"path": "/knowledge-base/history/miri-era/",
"similarity": 13
},
{
"id": "ai-futures-project",
"title": "AI Futures Project",
"path": "/knowledge-base/organizations/ai-futures-project/",
"similarity": 13
},
{
"id": "arc",
"title": "Alignment Research Center (ARC)",
"path": "/knowledge-base/organizations/arc/",
"similarity": 13
},
{
"id": "aria-uk",
"title": "Advanced Research and Invention Agency (ARIA)",
"path": "/knowledge-base/organizations/aria-uk/",
"similarity": 13
}
]
},
"changeHistory": [
{
"date": "2026-03-22",
"branch": "claude/add-davidad-aria-entities",
"title": "Auto-improve (polish): David Dalrymple",
"summary": "Polish pass on \"David Dalrymple\". Duration: 207.1s.",
"duration": "207.1s",
"cost": "$2-3"
}
],
"coverage": {
"passing": 6,
"total": 13,
"targets": {
"tables": 8,
"diagrams": 1,
"internalLinks": 15,
"externalLinks": 9,
"footnotes": 6,
"references": 6
},
"actuals": {
"tables": 2,
"diagrams": 0,
"internalLinks": 26,
"externalLinks": 2,
"footnotes": 8,
"references": 0,
"quotesWithQuotes": 0,
"quotesTotal": 0,
"accuracyChecked": 0,
"accuracyTotal": 0
},
"items": {
"summary": "green",
"schedule": "red",
"entity": "green",
"editHistory": "green",
"overview": "green",
"tables": "amber",
"diagrams": "red",
"internalLinks": "green",
"externalLinks": "amber",
"footnotes": "green",
"references": "red",
"quotes": "red",
"accuracy": "red"
},
"editHistoryCount": 1,
"ratingsString": "N:2.5 R:3.5 A:2 C:7"
},
"readerRank": 430,
"recommendedScore": 157.79
}
External Links
No external links
Backlinks (2)
| id | title | type | relationship |
|---|---|---|---|
| aria-uk | Advanced Research and Invention Agency (ARIA) | organization | leads-to |
| provably-safe | Provably Safe AI (davidad agenda) | approach | — |