Existential Risk from AI
existential-risk (E131) — ← Back to page — Path: /knowledge-base/risks/existential-risk/
Page Metadata
{
"id": "existential-risk",
"numericId": null,
"path": "/knowledge-base/risks/existential-risk/",
"filePath": "knowledge-base/risks/existential-risk.mdx",
"title": "Existential Risk from AI",
"quality": null,
"importance": null,
"contentFormat": "article",
"tractability": null,
"neglectedness": null,
"uncertainty": null,
"causalLevel": null,
"lastUpdated": "2026-02-09",
"llmSummary": null,
"structuredSummary": null,
"description": "Risks that could permanently curtail humanity's potential or cause extinction",
"ratings": null,
"category": "risks",
"subcategory": null,
"clusters": [
"ai-safety"
],
"metrics": {
"wordCount": 7,
"tableCount": 0,
"diagramCount": 0,
"internalLinks": 0,
"externalLinks": 0,
"footnoteCount": 0,
"bulletRatio": 0,
"sectionCount": 0,
"hasOverview": false,
"structuralScore": 2
},
"suggestedQuality": 13,
"updateFrequency": null,
"evergreen": true,
"wordCount": 7,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 2,
"redundancy": {
"maxSimilarity": 0,
"similarPages": []
}
}

Entity Data
{
"id": "existential-risk",
"type": "concept",
"title": "Existential Risk from AI",
"description": "Risks that could cause human extinction or permanently curtail humanity's long-term potential.",
"tags": [
"x-risk",
"catastrophic-risk",
"longtermism"
],
"relatedEntries": [
{
"id": "superintelligence",
"type": "concept"
}
],
"sources": [],
"lastUpdated": "2025-12",
"customFields": []
}

Canonical Facts (0)
No facts for this entity
External Links
{
"lesswrong": "https://www.lesswrong.com/tag/existential-risk",
"eaForum": "https://forum.effectivealtruism.org/topics/existential-risk",
"stampy": "https://aisafety.info/questions/8mTg/What-is-existential-risk",
"wikidata": "https://www.wikidata.org/wiki/Q16830153",
"eightyK": "https://80000hours.org/articles/existential-risks/"
}

Backlinks (2)
| id | title | type | relationship |
|---|---|---|---|
| cais | CAIS | lab-research | — |
| fhi | Future of Humanity Institute | organization | — |
Frontmatter
{
"title": "Existential Risk from AI",
"description": "Risks that could permanently curtail humanity's potential or cause extinction",
"sidebar": {
"order": 50
},
"quality": 0,
"importance": 0,
"lastEdited": "2026-02-09",
"entityType": "risk"
}

Raw MDX Source
---
title: "Existential Risk from AI"
description: "Risks that could permanently curtail humanity's potential or cause extinction"
sidebar:
  order: 50
quality: 0
importance: 0
lastEdited: "2026-02-09"
entityType: risk
---

This page is a stub. Content needed.