Long-term Trajectory
long-term-trajectory (E194)
Path: /ai-transition-model/long-term-trajectory/
Page Metadata
{
"id": "long-term-trajectory",
"numericId": null,
"path": "/ai-transition-model/long-term-trajectory/",
"filePath": "ai-transition-model/long-term-trajectory.mdx",
"title": "Long-term Trajectory",
"quality": null,
"importance": null,
"contentFormat": "article",
"tractability": null,
"neglectedness": null,
"uncertainty": null,
"causalLevel": null,
"lastUpdated": null,
"llmSummary": "This page contains only a React component reference with no actual content loaded. Cannot assess substance as no text, analysis, or information is present.",
"structuredSummary": null,
"description": null,
"ratings": {
"novelty": 0,
"rigor": 0,
"actionability": 0,
"completeness": 0
},
"category": "ai-transition-model",
"subcategory": "outcomes",
"clusters": [
"ai-safety"
],
"metrics": {
"wordCount": 0,
"tableCount": 0,
"diagramCount": 0,
"internalLinks": 0,
"externalLinks": 0,
"footnoteCount": 0,
"bulletRatio": 0,
"sectionCount": 0,
"hasOverview": false,
"structuralScore": 2
},
"suggestedQuality": 13,
"updateFrequency": null,
"evergreen": true,
"wordCount": 0,
"unconvertedLinks": [],
"unconvertedLinkCount": 0,
"convertedLinkCount": 0,
"backlinkCount": 3,
"redundancy": {
"maxSimilarity": 0,
"similarPages": []
}
}
Entity Data
{
"id": "long-term-trajectory",
"type": "ai-transition-model-scenario",
"title": "Long-term Trajectory",
"description": "The quality of humanity's long-term future given successful AI transition—measuring human flourishing, autonomy preservation, and value realization across civilizational timescales.",
"tags": [
"ai-transition-model",
"outcome",
"long-term",
"flourishing"
],
"relatedEntries": [
{
"id": "civilizational-competence",
"type": "ai-transition-model-factor",
"relationship": "driver"
},
{
"id": "ai-ownership",
"type": "ai-transition-model-factor",
"relationship": "driver"
},
{
"id": "long-term-lockin",
"type": "ai-transition-model-scenario",
"relationship": "sub-scenario"
},
{
"id": "human-agency",
"type": "ai-transition-model-parameter",
"relationship": "key-factor"
},
{
"id": "preference-authenticity",
"type": "ai-transition-model-parameter",
"relationship": "key-factor"
}
],
"sources": [],
"lastUpdated": "2026-01",
"customFields": [
{
"label": "Model Role",
"value": "Ultimate Outcome"
},
{
"label": "Primary Drivers",
"value": "Civilizational Competence, AI Ownership"
},
{
"label": "Risk Character",
"value": "Gradual degradation, potentially reversible"
}
]
}
Canonical Facts (0)
No facts for this entity
External Links
{
"eaForum": "https://forum.effectivealtruism.org/topics/longtermism",
"eightyK": "https://80000hours.org/articles/future-generations/"
}
Backlinks (3)
| id | title | type | relationship |
|---|---|---|---|
| ai-ownership | AI Ownership | ai-transition-model-factor | drives |
| civilizational-competence | Civilizational Competence | ai-transition-model-factor | drives |
| long-term-lockin | Long-term Lock-in | ai-transition-model-scenario | contributes-to |
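The entity record and backlink rows above share a consistent shape. A minimal TypeScript sketch of that shape, inferred only from the JSON and table shown on this page (field names and types are assumptions, not the wiki's documented schema):

```typescript
// Hypothetical types inferred from the JSON above; the wiki's real schema may differ.
interface RelatedEntry {
  id: string;           // e.g. "civilizational-competence"
  type: string;          // e.g. "ai-transition-model-factor"
  relationship: string;  // e.g. "driver", "sub-scenario", "key-factor"
}

interface CustomField {
  label: string;         // e.g. "Model Role"
  value: string;         // e.g. "Ultimate Outcome"
}

interface EntityData {
  id: string;
  type: string;          // e.g. "ai-transition-model-scenario"
  title: string;
  description: string;
  tags: string[];
  relatedEntries: RelatedEntry[];
  sources: string[];
  lastUpdated: string;   // "YYYY-MM", as in "2026-01" above
  customFields: CustomField[];
}

interface Backlink {
  id: string;
  title: string;
  type: string;
  relationship: string;  // e.g. "drives", "contributes-to"
}
```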
Frontmatter
{
"title": "Long-term Trajectory",
"sidebar": {
"order": 2
},
"importance": 0,
"quality": 0,
"llmSummary": "This page contains only a React component reference with no actual content loaded. Cannot assess substance as no text, analysis, or information is present.",
"ratings": {
"novelty": 0,
"rigor": 0,
"actionability": 0,
"completeness": 0
},
"clusters": [
"ai-safety"
],
"subcategory": "outcomes"
}
Raw MDX Source
---
title: Long-term Trajectory
sidebar:
order: 2
importance: 0
quality: 0
llmSummary: This page contains only a React component reference with no actual content loaded. Cannot assess substance as no text, analysis, or information is present.
ratings:
novelty: 0
rigor: 0
actionability: 0
completeness: 0
clusters:
- ai-safety
subcategory: outcomes
---
import {TransitionModelContent} from '@components/wiki/TransitionModelContent';
<TransitionModelContent entityId="E333" />
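The page body is a single MDX component reference, which is why the metadata above reports a word count of zero. A minimal sketch of how such a component might resolve the entityId and render the entity record, assuming a hypothetical `getEntity` loader; the actual `TransitionModelContent` implementation is not included on this page:

```tsx
// Hypothetical sketch only: the real TransitionModelContent is not shown on this
// page, and getEntity is an assumed loader, not a documented API.
import React from 'react';

interface EntitySummary {
  title: string;
  description: string;
  relatedEntries: { id: string; relationship: string }[];
}

// Assumed lookup against a local entity index keyed by ID (e.g. "E333").
declare function getEntity(entityId: string): EntitySummary | undefined;

export function TransitionModelContent({ entityId }: { entityId: string }) {
  const entity = getEntity(entityId);
  if (!entity) {
    return <p>No entity found for {entityId}.</p>;
  }
  return (
    <section>
      <h2>{entity.title}</h2>
      <p>{entity.description}</p>
      <ul>
        {entity.relatedEntries.map((rel) => (
          <li key={rel.id}>
            {rel.id} ({rel.relationship})
          </li>
        ))}
      </ul>
    </section>
  );
}
```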