AI Takeover
ai-takeover (E15)
Path: /knowledge-base/risks/ai-takeover/
Page Metadata
{
"id": "ai-takeover",
"numericId": null,
"path": "/knowledge-base/risks/ai-takeover/",
"filePath": "knowledge-base/risks/ai-takeover.mdx",
"title": "AI Takeover",
"quality": null,
"importance": null,
"contentFormat": "article",
"tractability": null,
"neglectedness": null,
"uncertainty": null,
"causalLevel": null,
"lastUpdated": "2026-02-09",
"llmSummary": null,
"structuredSummary": null,
"description": "Scenarios where AI systems seize control from humans",
"ratings": null,
"category": "risks",
"subcategory": null,
"clusters": [
"ai-safety"
],
"metrics": {
"wordCount": 7,
"tableCount": 0,
"diagramCount": 0,
"internalLinks": 0,
"externalLinks": 0,
"footnoteCount": 0,
"bulletRatio": 0,
"sectionCount": 0,
"hasOverview": false,
"structuralScore": 2
},
"suggestedQuality": 13,
"updateFrequency": null,
"evergreen": true,
"wordCount": 7,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 2,
"redundancy": {
"maxSimilarity": 0,
"similarPages": []
}
}
Entity Data
{
"id": "ai-takeover",
"type": "ai-transition-model-scenario",
"title": "AI Takeover",
"description": "Scenarios where AI systems pursue goals misaligned with human values at scale, potentially resulting in human disempowerment or extinction.",
"tags": [
"ai-transition-model",
"scenario",
"x-risk",
"misalignment"
],
"relatedEntries": [
{
"id": "existential-catastrophe",
"type": "ai-transition-model-scenario",
"relationship": "contributes-to"
},
{
"id": "misalignment-potential",
"type": "ai-transition-model-factor",
"relationship": "driven-by"
},
{
"id": "alignment-robustness",
"type": "ai-transition-model-parameter",
"relationship": "mitigated-by"
}
],
"sources": [],
"lastUpdated": "2025-12",
"customFields": [
{
"label": "Model Role",
"value": "Catastrophic Scenario"
},
{
"label": "Primary Drivers",
"value": "Misalignment Potential"
},
{
"label": "Sub-scenarios",
"value": "Gradual takeover, Rapid takeover"
}
]
}
Canonical Facts (0)
No canonical facts recorded for this entity.
External Links
{
"wikipedia": "https://en.wikipedia.org/wiki/AI_takeover",
"wikidata": "https://www.wikidata.org/wiki/Q2254427",
"eightyK": "https://80000hours.org/problem-profiles/risks-from-power-seeking-ai/"
}
Backlinks (2)
| id | title | type | relationship |
|---|---|---|---|
| misalignment-potential | Misalignment Potential | ai-transition-model-factor | enables |
| existential-catastrophe | Existential Catastrophe | ai-transition-model-scenario | sub-scenario |
Frontmatter
{
"title": "AI Takeover",
"description": "Scenarios where AI systems seize control from humans",
"sidebar": {
"order": 50
},
"quality": 0,
"importance": 0,
"lastEdited": "2026-02-09",
"entityType": "risk"
}
Raw MDX Source
---
title: "AI Takeover"
description: "Scenarios where AI systems seize control from humans"
sidebar:
  order: 50
quality: 0
importance: 0
lastEdited: "2026-02-09"
entityType: risk
---

This page is a stub. Content needed.