Skip to content
Longterm Wiki

Max Tegmark

max-tegmark · person · Path: /knowledge-base/people/max-tegmark/
Entity ID (EID): E433
← Back to page · 23 backlinks · Quality: 63 · Updated: 2026-02-02
Page Record — database.json — merged from MDX frontmatter + Entity YAML + computed metrics at build time
{
  "id": "max-tegmark",
  "wikiId": "E433",
  "path": "/knowledge-base/people/max-tegmark/",
  "filePath": "knowledge-base/people/max-tegmark.mdx",
  "title": "Max Tegmark",
  "quality": 63,
  "readerImportance": 81.5,
  "researchImportance": 39,
  "tacticalValue": 72,
  "contentFormat": "article",
  "causalLevel": null,
  "lastUpdated": "2026-02-02",
  "dateCreated": "2026-02-15",
  "summary": "Comprehensive biographical profile of Max Tegmark covering his transition from cosmology to AI safety advocacy, his role founding the Future of Life Institute, and his controversial Mathematical Universe Hypothesis. The article provides balanced coverage of both his contributions and criticisms, including the 2023 grant controversy and scientific debates about his theoretical work.",
  "description": "Swedish-American physicist at MIT, co-founder of the Future of Life Institute, and prominent AI safety advocate known for his work on the Mathematical Universe Hypothesis and efforts to promote safe artificial intelligence development.",
  "ratings": {
    "novelty": 4,
    "rigor": 7,
    "completeness": 8,
    "actionability": 6
  },
  "category": "people",
  "subcategory": "safety-researchers",
  "clusters": [
    "ai-safety",
    "governance"
  ],
  "metrics": {
    "wordCount": 2631,
    "tableCount": 2,
    "diagramCount": 0,
    "internalLinks": 20,
    "externalLinks": 2,
    "footnoteCount": 72,
    "bulletRatio": 0.05,
    "sectionCount": 23,
    "hasOverview": true,
    "structuralScore": 14
  },
  "suggestedQuality": 93,
  "updateFrequency": 45,
  "evergreen": true,
  "wordCount": 2631,
  "unconvertedLinks": [
    {
      "text": "en.wikipedia.org",
      "url": "https://en.wikipedia.org/wiki/Max_Tegmark",
      "resourceId": "23cbf8a562b3afec",
      "resourceTitle": "Max Tegmark - Wikipedia"
    }
  ],
  "unconvertedLinkCount": 1,
  "convertedLinkCount": 0,
  "backlinkCount": 23,
  "citationHealth": {
    "total": 67,
    "withQuotes": 48,
    "verified": 47,
    "accuracyChecked": 47,
    "accurate": 29,
    "inaccurate": 2,
    "avgScore": 0.9614093893518051
  },
  "hallucinationRisk": {
    "level": "low",
    "score": 30,
    "factors": [
      "biographical-claims",
      "well-cited",
      "high-rigor"
    ]
  },
  "entityType": "person",
  "redundancy": {
    "maxSimilarity": 16,
    "similarPages": [
      {
        "id": "robin-hanson",
        "title": "Robin Hanson",
        "path": "/knowledge-base/people/robin-hanson/",
        "similarity": 16
      },
      {
        "id": "miri-era",
        "title": "The MIRI Era (2000-2015)",
        "path": "/knowledge-base/history/miri-era/",
        "similarity": 15
      },
      {
        "id": "frontier-model-forum",
        "title": "Frontier Model Forum",
        "path": "/knowledge-base/organizations/frontier-model-forum/",
        "similarity": 15
      },
      {
        "id": "dan-hendrycks",
        "title": "Dan Hendrycks",
        "path": "/knowledge-base/people/dan-hendrycks/",
        "similarity": 15
      },
      {
        "id": "eliezer-yudkowsky",
        "title": "Eliezer Yudkowsky",
        "path": "/knowledge-base/people/eliezer-yudkowsky/",
        "similarity": 15
      }
    ]
  },
  "changeHistory": [
    {
      "date": "2026-02-18",
      "branch": "claude/fix-issue-240-N5irU",
      "title": "Surface tacticalValue in /wiki table and score 53 pages",
      "summary": "Added `tacticalValue` to `ExploreItem` interface, `getExploreItems()` mappings, the `/wiki` explore table (new sortable \"Tact.\" column), and the card view sort dropdown. Scored 49 new pages with tactical values (4 were already scored), bringing total to 53.",
      "model": "sonnet-4",
      "duration": "~30min"
    },
    {
      "date": "2026-02-16",
      "branch": "claude/investigate-arxiv-paper-UmGPu",
      "title": "Singapore Consensus on AI Safety",
      "summary": "Investigated arXiv:2506.20702 (The Singapore Consensus on Global AI Safety Research Priorities) and integrated it into the wiki. Updated the international-summits page with a new SCAI section and Mermaid diagram, fixed the broken Singapore Consensus resource in web-other.yaml, updated Bengio/Russell/Tegmark pages with references, created a new dedicated singapore-consensus page with entity E694, and registered the entity in responses.yaml.",
      "pr": 157
    }
  ],
  "coverage": {
    "passing": 6,
    "total": 13,
    "targets": {
      "tables": 11,
      "diagrams": 1,
      "internalLinks": 21,
      "externalLinks": 13,
      "footnotes": 8,
      "references": 8
    },
    "actuals": {
      "tables": 2,
      "diagrams": 0,
      "internalLinks": 20,
      "externalLinks": 2,
      "footnotes": 72,
      "references": 1,
      "quotesWithQuotes": 48,
      "quotesTotal": 67,
      "accuracyChecked": 47,
      "accuracyTotal": 67
    },
    "items": {
      "summary": "green",
      "schedule": "green",
      "entity": "green",
      "editHistory": "green",
      "overview": "green",
      "tables": "amber",
      "diagrams": "red",
      "internalLinks": "amber",
      "externalLinks": "amber",
      "footnotes": "green",
      "references": "amber",
      "quotes": "amber",
      "accuracy": "amber"
    },
    "editHistoryCount": 2,
    "ratingsString": "N:4 R:7 A:6 C:8"
  },
  "readerRank": 78,
  "researchRank": 351,
  "recommendedScore": 180.58
}
External Links
{
  "wikidata": "https://www.wikidata.org/wiki/Q2076321",
  "grokipedia": "https://grokipedia.com/page/Max_Tegmark"
}
Backlinks (23)
id · title · type · relationship
fli · Future of Life Institute (FLI) · organization
fli · Future of Life Institute (FLI) · organization · leads-to
future-of-life-foundation · Future of Life Foundation (FLF) · organization
jaan-tallinn · Jaan Tallinn · person
pause-debate · Should We Pause AI Development? · crux
miri-era · The MIRI Era (2000-2015) · historical
provable-safe · Provable / Guaranteed Safe AI · concept
metaculus · Metaculus · organization
sff · Survival and Flourishing Fund (SFF) · organization
david-dalrymple · David Dalrymple · person
elon-musk-predictions · Elon Musk: Track Record · concept
greg-brockman · Greg Brockman · person
stuart-russell · Stuart Russell · person
yann-lecun-predictions · Yann LeCun: Track Record · concept
yann-lecun · Yann LeCun · person
california-sb1047 · California SB 1047 · policy
coordination-mechanisms · International Coordination Mechanisms · concept
lab-culture · AI Lab Safety Culture · approach
paris-ai-summit · Paris AI Action Summit (February 2025) · policy
provably-safe · Provably Safe AI (davidad agenda) · approach
singapore-consensus · Singapore Consensus on AI Safety Research Priorities · policy
multipolar-trap · Multipolar Trap (AI Development) · risk
racing-dynamics · AI Development Racing Dynamics · risk
Longterm Wiki