Jared Kaplan
ID: jared-kaplan · Type: person · Path: /knowledge-base/people/jared-kaplan/
Entity ID (EID): E1239
Page Record — database.json (merged from MDX frontmatter + Entity YAML + computed metrics at build time)
{
"id": "jared-kaplan",
"wikiId": "E1239",
"path": "/knowledge-base/people/jared-kaplan/",
"filePath": "knowledge-base/people/jared-kaplan.mdx",
"title": "Jared Kaplan",
"quality": null,
"readerImportance": null,
"researchImportance": null,
"tacticalValue": null,
"contentFormat": "article",
"causalLevel": null,
"lastUpdated": "2026-03-23",
"dateCreated": "2026-03-23",
"summary": "Comprehensive biographical profile of Jared Kaplan covering his scaling laws research, Anthropic co-founder role, and Responsible Scaling Officer appointment, with notable coverage of RSP enforcement and timeline predictions through mid-2025; however, the entire article relies on a single Perplexity research compilation rather than primary sources, significantly undermining verifiability.",
"description": "Theoretical physicist, co-founder and Chief Science Officer of Anthropic, and pioneer of AI scaling laws research.",
"ratings": null,
"category": "people",
"subcategory": "safety-researchers",
"clusters": [
"ai-safety"
],
"metrics": {
"wordCount": 1659,
"tableCount": 2,
"diagramCount": 0,
"internalLinks": 5,
"externalLinks": 2,
"footnoteCount": 1,
"bulletRatio": 0.08,
"sectionCount": 11,
"hasOverview": true,
"structuralScore": 12
},
"suggestedQuality": 80,
"updateFrequency": 90,
"evergreen": true,
"wordCount": 1659,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 10,
"hallucinationRisk": {
"level": "high",
"score": 80,
"factors": [
"biographical-claims",
"low-citation-density",
"mostly-unsourced-footnotes"
],
"integrityIssues": [
"mostly-unsourced-footnotes"
]
},
"entityType": "person",
"redundancy": {
"maxSimilarity": 17,
"similarPages": [
{
"id": "sam-mccandlish",
"title": "Sam McCandlish",
"path": "/knowledge-base/people/sam-mccandlish/",
"similarity": 17
},
{
"id": "chris-olah",
"title": "Chris Olah",
"path": "/knowledge-base/people/chris-olah/",
"similarity": 15
},
{
"id": "ilya-sutskever",
"title": "Ilya Sutskever",
"path": "/knowledge-base/people/ilya-sutskever/",
"similarity": 15
},
{
"id": "jack-clark",
"title": "Jack Clark",
"path": "/knowledge-base/people/jack-clark/",
"similarity": 15
},
{
"id": "anthropic-core-views",
"title": "Anthropic Core Views",
"path": "/knowledge-base/responses/anthropic-core-views/",
"similarity": 15
}
]
},
"coverage": {
"passing": 4,
"total": 13,
"targets": {
"tables": 7,
"diagrams": 1,
"internalLinks": 13,
"externalLinks": 8,
"footnotes": 5,
"references": 5
},
"actuals": {
"tables": 2,
"diagrams": 0,
"internalLinks": 5,
"externalLinks": 2,
"footnotes": 1,
"references": 0,
"quotesWithQuotes": 0,
"quotesTotal": 0,
"accuracyChecked": 0,
"accuracyTotal": 0
},
"items": {
"summary": "green",
"schedule": "green",
"entity": "green",
"editHistory": "red",
"overview": "green",
"tables": "amber",
"diagrams": "red",
"internalLinks": "amber",
"externalLinks": "amber",
"footnotes": "amber",
"references": "red",
"quotes": "red",
"accuracy": "red"
}
},
"recommendedScore": 19.64
}

External Links
No external links
Backlinks (10)
| id | title | type | relationship |
|---|---|---|---|
| anthropic | Anthropic | organization | research |
| why-alignment-easy | Why Alignment Might Be Easy | argument | — |
| anthropic-government-standoff | Anthropic-Pentagon Standoff (2026) | event | — |
| anthropic-investors | Anthropic (Funder) | analysis | — |
| anthropic-stakeholders | Anthropic Stakeholders | table | — |
| anthropic-valuation | Anthropic Valuation Analysis | analysis | — |
| sam-mccandlish | Sam McCandlish | person | — |
| anthropic-core-views | Anthropic Core Views | safety-agenda | — |
| lab-culture | AI Lab Safety Culture | approach | — |
| superintelligence | Superintelligence | concept | — |