diff --git a/.claude/commands/agents.md b/.claude/commands/agents.md
index 3046216b3..46b3f933d 100644
--- a/.claude/commands/agents.md
+++ b/.claude/commands/agents.md
@@ -19,7 +19,7 @@ Run this command when:
2. **If docs changed or `$ARGUMENTS` includes `force`:**
Run the generator script:
```bash
- node scripts/generate-agents-md.js
+ node scripts/agents.js
```
3. **Report the result:**
diff --git a/.github/.githooks/post-commit b/.github/.githooks/post-commit
new file mode 100644
index 000000000..d25e2a1b5
--- /dev/null
+++ b/.github/.githooks/post-commit
@@ -0,0 +1,59 @@
+#!/bin/sh
+# Regenerates docs/agents.md after any commit whose message contains "agents.md" (case-insensitive).
+# If the index changed, creates a follow-up commit.
+#
+# Debug: DEBUG_AGENTS_HOOK=1 git commit ... (or export it for the session)
+
+log() {
+ [ -n "$DEBUG_AGENTS_HOOK" ] && echo "[post-commit] $*" >&2
+}
+
+log "fired (pid=$$)"
+
+# Guard against recursion: this hook re-fires on the follow-up commit below.
+if [ -n "$AGENTS_MD_HOOK_RUNNING" ]; then
+ log "recursion guard hit; exiting"
+ exit 0
+fi
+export AGENTS_MD_HOOK_RUNNING=1
+
+REPO_ROOT=$(git rev-parse --show-toplevel) || exit 0
+cd "$REPO_ROOT" || exit 0
+log "repo root: $REPO_ROOT"
+
+# Only run when the commit message opts in with the trigger token (case-insensitive).
+MSG=$(git log -1 --pretty=%B)
+log "last commit msg: $MSG"
+if ! printf '%s' "$MSG" | grep -qi 'agents\.md'; then
+ log "trigger 'agents.md' NOT in commit message; exiting"
+ exit 0
+fi
+log "trigger matched"
+
+if [ ! -f scripts/agents.js ]; then
+ log "scripts/agents.js NOT FOUND; exiting"
+ exit 0
+fi
+
+log "running: node scripts/agents.js"
+if [ -n "$DEBUG_AGENTS_HOOK" ]; then
+ node scripts/agents.js
+ rc=$?
+else
+ node scripts/agents.js >/dev/null 2>&1
+ rc=$?
+fi
+if [ $rc -ne 0 ]; then
+ echo "post-commit: agents.js failed (exit $rc); skipping" >&2
+ exit 0
+fi
+
+if git diff --quiet -- docs/agents.md 2>/dev/null; then
+ log "docs/agents.md unchanged; nothing to commit"
+ exit 0
+fi
+log "docs/agents.md changed; creating follow-up commit"
+
+git add docs/agents.md
+git commit -m "chore: regenerate docs/agents.md" --no-verify >/dev/null
+log "follow-up commit created"
diff --git a/docs/agents.md b/docs/agents.md
index b32ce2b97..07f0ee624 100644
--- a/docs/agents.md
+++ b/docs/agents.md
@@ -1,6 +1,41 @@
-# Base Docs Index
-IMPORTANT: Prefer retrieval-led reasoning. Read relevant docs before generating code.
-Base is an Ethereum L2 by Coinbase. Docs for: Base Chain, Smart Wallet, OnchainKit, MiniKit.
+---
+title: Base Docs Index
+description: Look up Base documentation with a compact directory-grouped index built for AI coding agents. Lists every markdown page by parent directory so agents find context before generating code.
+---
+# https://docs.base.org/llms.txt
+
+## Base Documentation — LLM Entry Point
+
+> High-signal index of section guides. Jump to a section's llms.txt for concise intros, curated links, and fast navigation.
+
+- [AI Agents](./ai-agents/llms.txt) — Build AI agents that trade, earn, and transact autonomously on Base
+- [Apps](./apps/llms.txt) — A step-by-step guide to building a Next.js tally app on Base using wagmi and viem, with wallet connection, contract reads and writes, and batch transaction support.
+- [Base Account](./base-account/llms.txt)
+- [Base Chain](./base-chain/llms.txt) — Bridge tokens and messages between Base and Solana Mainnet
+- [Get Started](./get-started/llms.txt)
+
+## Tools available for AI assistants
+
+These resources give AI assistants direct access to Base documentation and reusable workflows.
+
+### Base MCP server
+
+`https://docs.base.org/mcp`
+
+### Base skills
+
+AI agents can use Base skills to perform onchain actions directly from their tool loop — no custom integration required. Available skills include:
+
+[https://github.com/base/skills](https://github.com/base/skills)
+
+Install Base skills for your AI assistant:
+
+```
+npx skills add base/base-skills
+```
+
+## Compact docs index
+
[Docs]|root:./docs
|ai-agents:index
|ai-agents/payments:accepting-payments,pay-for-services-with-x402
@@ -69,5 +104,4 @@ Base is an Ethereum L2 by Coinbase. Docs for: Base Chain, Smart Wallet, OnchainK
|base-chain/specs/upgrades/jovian:derivation,exec-engine,l1-attributes,overview,system-config
|base-chain/specs/upgrades/pectra-blob-schedule:derivation,overview
|get-started:base-mentorship-program,base-services-hub,base,block-explorers,concepts,country-leads-and-ambassadors,data-indexers,deploy-smart-contracts,docs-llms,docs-mcp,get-funded,launch-token,learning-resources,prompt-library,resources-for-ai-agents
-|onchainkit:migrate-from-onchainkit
|root:agents,changes,cookie-policy,privacy-policy,terms-of-service,tone_of_voice
diff --git a/docs/changes.md b/docs/changes.md
index 642d253ae..58d5ecc97 100644
--- a/docs/changes.md
+++ b/docs/changes.md
@@ -36,7 +36,7 @@ Cross-links updated in **non-hidden** docs only, e.g. [`get-started/learning-res
- [`docs/apps/**`](apps/) — **Visible** pages (e.g. `guides/migrate-to-standard-web-app`, `growth/rewards`, `technical-guides/base-notifications`) plus `llms.txt` / `llms-full.txt`.
- [`docs/llms.txt`](llms.txt), [`docs/llms-full.txt`](llms-full.txt).
- [`claude.md`](../claude.md) — repo structure diagram.
-- [`scripts/generate-agents-md.js`](../scripts/generate-agents-md.js) — comments.
+- [`scripts/agents.js`](../scripts/agents.js) — comments.
- [`docs/agents.md`](agents.md) — regenerated.
## Verification
diff --git a/docs/onchainkit/migrate-from-onchainkit.mdx b/docs/onchainkit/migrate-from-onchainkit.mdx
deleted file mode 100644
index 6c9754c3a..000000000
--- a/docs/onchainkit/migrate-from-onchainkit.mdx
+++ /dev/null
@@ -1,63 +0,0 @@
----
-title: "Migrate your OnchainKit App"
-description: "How to migrate your OnchainKit app to wagmi and viem using the Base migration skill."
----
-
-OnchainKit is no longer maintained. This guide helps you migrate your existing OnchainKit app to a standalone [wagmi](https://wagmi.sh) project using the Base migration skill.
-
-## Overview
-
-The skill handles three components: `OnchainKitProvider`, `Wallet`, and `Transaction`. For each it:
-
-- Deletes existing OnchainKit component code from your project
-- Creates new wagmi/viem-based replacement files
-- Logs any errors in a `mistakes.md` file at your project root
-
-## Prerequisites
-
-- A Next.js or React project using OnchainKit
-- An AI coding assistant that supports skills (Claude Code or Cursor)
-
-## Install the Skill
-
-```bash
-npx skills add base/base-skills
-```
-
-## Migration Prompts
-
-
-If migrating individually, start with the Provider prompt below. The wallet and transaction components depend on `WagmiProvider` being implemented first.
-
-
-### One-Shot (all three components)
-
-```text
-Migrate my OnchainKit app to use standalone wagmi and viem. Replace the provider, wallet component, and transaction component. Remove the OnchainKit dependency entirely.
-```
-
-### Provider
-
-```text
-Replace my OnchainKitProvider with WagmiProvider and QueryClientProvider.
-```
-
-### Wallet
-
-```text
-Migrate my OnchainKit wallet component to a standalone wagmi-based connect button.
-```
-
-### Transaction
-
-```text
-Migrate my OnchainKit transaction component to use wagmi hooks directly.
-```
-
-## Verify
-
-After migration, run this prompt to confirm the three migrated components are fully replaced:
-
-```text
-Run npm run build and confirm there are no remaining imports of OnchainKitProvider, Wallet, or Transaction from @coinbase/onchainkit in the project.
-```
diff --git a/githooks/README.md b/githooks/README.md
new file mode 100644
index 000000000..37799e31f
--- /dev/null
+++ b/githooks/README.md
@@ -0,0 +1,13 @@
+# Hooks
+
+To enable git hooks for this repo, run the following from the repo root:
+
+```sh
+# 1. Register the hooks directory
+git config core.hooksPath githooks
+
+# 2. Make the post-commit hook executable
+chmod +x githooks/post-commit
+```
+
+Once installed, any commit whose message contains `agents.md` will automatically regenerate `docs/agents.md`.
diff --git a/githooks/post-commit b/githooks/post-commit
new file mode 100755
index 000000000..d25e2a1b5
--- /dev/null
+++ b/githooks/post-commit
@@ -0,0 +1,59 @@
+#!/bin/sh
+# Regenerates docs/agents.md after any commit whose message contains "agents.md" (case-insensitive).
+# If the index changed, creates a follow-up commit.
+#
+# Debug: DEBUG_AGENTS_HOOK=1 git commit ... (or export it for the session)
+
+log() {
+ [ -n "$DEBUG_AGENTS_HOOK" ] && echo "[post-commit] $*" >&2
+}
+
+log "fired (pid=$$)"
+
+# Guard against recursion: this hook re-fires on the follow-up commit below.
+if [ -n "$AGENTS_MD_HOOK_RUNNING" ]; then
+ log "recursion guard hit; exiting"
+ exit 0
+fi
+export AGENTS_MD_HOOK_RUNNING=1
+
+REPO_ROOT=$(git rev-parse --show-toplevel) || exit 0
+cd "$REPO_ROOT" || exit 0
+log "repo root: $REPO_ROOT"
+
+# Only run when the commit message opts in with the trigger token (case-insensitive).
+MSG=$(git log -1 --pretty=%B)
+log "last commit msg: $MSG"
+if ! printf '%s' "$MSG" | grep -qi 'agents\.md'; then
+ log "trigger 'agents.md' NOT in commit message; exiting"
+ exit 0
+fi
+log "trigger matched"
+
+if [ ! -f scripts/agents.js ]; then
+ log "scripts/agents.js NOT FOUND; exiting"
+ exit 0
+fi
+
+log "running: node scripts/agents.js"
+if [ -n "$DEBUG_AGENTS_HOOK" ]; then
+ node scripts/agents.js
+ rc=$?
+else
+ node scripts/agents.js >/dev/null 2>&1
+ rc=$?
+fi
+if [ $rc -ne 0 ]; then
+ echo "post-commit: agents.js failed (exit $rc); skipping" >&2
+ exit 0
+fi
+
+if git diff --quiet -- docs/agents.md 2>/dev/null; then
+ log "docs/agents.md unchanged; nothing to commit"
+ exit 0
+fi
+log "docs/agents.md changed; creating follow-up commit"
+
+git add docs/agents.md
+git commit -m "chore: regenerate docs/agents.md" --no-verify >/dev/null
+log "follow-up commit created"
diff --git a/scripts/agents.js b/scripts/agents.js
new file mode 100755
index 000000000..d3f9a6de0
--- /dev/null
+++ b/scripts/agents.js
@@ -0,0 +1,331 @@
+#!/usr/bin/env node
+
+/**
+ * Generates docs/agents.md — a human-readable LLM entry point followed by a
+ * compact, minified directory index of all documentation files.
+ *
+ * Pipeline:
+ * 1. loadMintIgnore — reads docs/.mintignore (gitignore-style) to skip files.
+ * 2. discoverTopLevelSections — scans top-level dirs in docs/, humanizes names
+ * (with acronym handling), pulls each section's description from its
+ * index.{md,mdx} or overview.{md,mdx} frontmatter.
+ * 3. discoverFeaturedPages — walks all .md/.mdx files, collects pages whose
+ * frontmatter has `featured: true`. Used to build the "Recommended
+ * starting points" section. Section is omitted if no flagged pages exist.
+ * 4. scanDocs — recursively groups .md/.mdx files by parent directory for
+ * the compact pipe-delimited index at the bottom of the file.
+ * 5. generateAgentsMd — assembles frontmatter + LLM entry point + tools +
+ * featured pages + compact index, writes to docs/agents.md.
+ *
+ * Hardcoded values (per spec): docs URL, MCP URL, skills repo URL/install cmd.
+ * Everything else is derived from the repo at run time.
+ *
+ * To surface a page under "Recommended starting points", add to its frontmatter:
+ * featured: true
+ * order: 10 # optional sort key (lower = earlier)
+ *
+ * Usage: node scripts/agents.js
+ */
+
+const fs = require('fs');
+const path = require('path');
+
+const CONFIG = {
+ docsDir: './docs',
+ outputFile: './docs/agents.md',
+
+ // The only hardcoded external references allowed.
+ docsUrl: 'https://docs.base.org',
+ mcpUrl: 'https://docs.base.org/mcp',
+ skillsRepoUrl: 'https://github.com/base/skills',
+ skillsInstallCmd: 'npx skills add base/base-skills',
+
+ // Acronyms preserved as upper-case when humanizing directory names.
+ acronyms: new Set(['AI', 'MCP', 'API', 'SDK', 'L2', 'EVM', 'NFT', 'DAO', 'P2P', 'RPC']),
+
+ // Files searched (in order) for a section's description. Looked up directly
+ // inside the section dir; if none match, the script falls back to the first
+ // file inside /quickstart/.
+ sectionIndexFiles: ['index.mdx', 'index.md', 'overview.mdx', 'overview.md'],
+ sectionFallbackDirs: ['quickstart'],
+
+ skipFiles: [
+ 'README.md', 'CHANGELOG.md', 'LICENSE.md', '.DS_Store',
+ 'docs.json', 'package-lock.json', 'llms.txt', 'llms-full.txt',
+ 'iframe-theme.js', 'style.css', 'instructions.md', 'writing.md', 'CLAUDE.md'
+ ],
+
+ skipFilePatterns: [/-vid$/, /-video$/, /-sbs$/],
+
+ skipDirs: [
+ 'node_modules', '.git', 'dist', 'build', 'coverage',
+ '.next', 'images', 'videos', 'logo', 'openapi', '.claude', 'snippets'
+ ],
+
+ extensions: ['.md', '.mdx']
+};
+
+function humanize(name) {
+ const cleaned = name.replace(/^\d+-/, '');
+ return cleaned.split('-').map(word => {
+ if (!word) return '';
+ const upper = word.toUpperCase();
+ if (CONFIG.acronyms.has(upper)) return upper;
+ return word.charAt(0).toUpperCase() + word.slice(1);
+ }).join(' ');
+}
+
+function stripNumericPrefixes(relPath) {
+ return relPath.split('/').map(seg => seg.replace(/^\d+-/, '')).join('/');
+}
+
+function parseFrontmatter(content) {
+ const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/);
+ if (!match) return { frontmatter: {}, body: content };
+
+ const fm = {};
+ for (const line of match[1].split(/\r?\n/)) {
+ const m = line.match(/^([a-zA-Z_][a-zA-Z0-9_-]*):\s*(.*)$/);
+ if (!m) continue;
+ let value = m[2].trim();
+ if ((value.startsWith('"') && value.endsWith('"')) ||
+ (value.startsWith("'") && value.endsWith("'"))) {
+ value = value.slice(1, -1);
+ }
+ if (value === 'true') value = true;
+ else if (value === 'false') value = false;
+ else if (/^-?\d+$/.test(value)) value = Number(value);
+ fm[m[1]] = value;
+ }
+ return { frontmatter: fm, body: match[2] };
+}
+
+function loadMintIgnore(mintignorePath) {
+ const ignored = { dirs: new Set(), files: new Set(), bareFiles: new Set() };
+ if (!fs.existsSync(mintignorePath)) return ignored;
+
+ for (const line of fs.readFileSync(mintignorePath, 'utf8').split('\n')) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith('#')) continue;
+ if (trimmed.endsWith('/*')) {
+ ignored.dirs.add(trimmed.slice(1, -2));
+ } else if (trimmed.startsWith('/')) {
+ ignored.files.add(trimmed.slice(1));
+ } else {
+ ignored.bareFiles.add(trimmed.replace(/\.mdx?$/, ''));
+ }
+ }
+ return ignored;
+}
+
+function descriptionFromFile(file) {
+ if (!fs.existsSync(file)) return '';
+ const { frontmatter } = parseFrontmatter(fs.readFileSync(file, 'utf8'));
+ return frontmatter.description ? String(frontmatter.description).trim() : '';
+}
+
+function firstDocFileIn(dir) {
+ if (!fs.existsSync(dir)) return '';
+ const candidates = fs.readdirSync(dir, { withFileTypes: true })
+ .filter(e => e.isFile()
+ && CONFIG.extensions.includes(path.extname(e.name).toLowerCase())
+ && !CONFIG.skipFiles.includes(e.name))
+ .map(e => e.name)
+ .sort();
+ return candidates.length ? path.join(dir, candidates[0]) : '';
+}
+
+function readSectionDescription(sectionDir) {
+ // 1. Section-level index/overview file
+ for (const candidate of CONFIG.sectionIndexFiles) {
+ const desc = descriptionFromFile(path.join(sectionDir, candidate));
+ if (desc) return desc;
+ }
+ // 2. First file inside a known fallback subdir (e.g. quickstart/)
+ for (const sub of CONFIG.sectionFallbackDirs) {
+ const first = firstDocFileIn(path.join(sectionDir, sub));
+ if (first) {
+ const desc = descriptionFromFile(first);
+ if (desc) return desc;
+ }
+ }
+ return '';
+}
+
+function discoverTopLevelSections() {
+ if (!fs.existsSync(CONFIG.docsDir)) return [];
+ return fs.readdirSync(CONFIG.docsDir, { withFileTypes: true })
+ .filter(e => e.isDirectory()
+ && !CONFIG.skipDirs.includes(e.name)
+ && !e.name.startsWith('.'))
+ .map(e => {
+ const cleanName = e.name.replace(/^\d+-/, '');
+ return {
+ slug: cleanName,
+ title: humanize(e.name),
+ description: readSectionDescription(path.join(CONFIG.docsDir, e.name)),
+ llmsPath: `./${cleanName}/llms.txt`,
+ };
+ })
+ .sort((a, b) => a.title.localeCompare(b.title));
+}
+
+function walkDocFiles(dir, results = []) {
+ if (!fs.existsSync(dir)) return results;
+ for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
+ if (entry.name.startsWith('.')) continue;
+ if (CONFIG.skipFiles.includes(entry.name)) continue;
+ const full = path.join(dir, entry.name);
+ if (entry.isDirectory()) {
+ if (CONFIG.skipDirs.includes(entry.name)) continue;
+ walkDocFiles(full, results);
+ } else if (CONFIG.extensions.includes(path.extname(entry.name).toLowerCase())) {
+ results.push(full);
+ }
+ }
+ return results;
+}
+
+function discoverFeaturedPages() {
+ const featured = [];
+ for (const file of walkDocFiles(CONFIG.docsDir)) {
+ const { frontmatter } = parseFrontmatter(fs.readFileSync(file, 'utf8'));
+ if (frontmatter.featured !== true) continue;
+
+ const relWithExt = path.relative(CONFIG.docsDir, file).replace(/\\/g, '/');
+ const rel = stripNumericPrefixes(relWithExt.replace(/\.mdx?$/, ''));
+ const title = frontmatter.title
+ ? String(frontmatter.title)
+ : humanize(path.basename(file, path.extname(file)));
+ featured.push({
+ title,
+ url: `${CONFIG.docsUrl}/${rel}`,
+ order: typeof frontmatter.order === 'number' ? frontmatter.order : 9999,
+ });
+ }
+ return featured.sort((a, b) => a.order - b.order || a.title.localeCompare(b.title));
+}
+
+function scanDocs(dir, basePath = '', ignored) {
+ const index = {};
+ if (!fs.existsSync(dir)) return index;
+
+ const files = [];
+ for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
+ if (CONFIG.skipFiles.includes(entry.name) || entry.name.startsWith('.')) continue;
+ const fullPath = path.join(dir, entry.name);
+
+ if (entry.isDirectory()) {
+ if (CONFIG.skipDirs.includes(entry.name)) continue;
+ Object.assign(index, scanDocs(fullPath, basePath ? `${basePath}/${entry.name}` : entry.name, ignored));
+ } else {
+ const ext = path.extname(entry.name).toLowerCase();
+ if (!CONFIG.extensions.includes(ext)) continue;
+ const baseName = entry.name.replace(/\.mdx?$/, '');
+ if (CONFIG.skipFilePatterns.some(p => p.test(baseName))) continue;
+ if (ignored.bareFiles.has(baseName)) continue;
+ if (ignored.files.has(basePath ? `${basePath}/${baseName}` : baseName)) continue;
+ files.push(baseName);
+ }
+ }
+
+ if (files.length > 0 && !ignored.dirs.has(basePath)) {
+ const key = (basePath || 'root').replace(/\/?\d+-/g, '/').replace(/^\//, '');
+ index[key] = files;
+ }
+ return index;
+}
+
+function buildEntryPointSection(sections) {
+ const bullets = sections.map(s => {
+ const desc = s.description ? ` — ${s.description}` : '';
+ return `- [${s.title}](${s.llmsPath})${desc}`;
+ }).join('\n');
+
+ return `## Base Documentation — LLM Entry Point
+
+> High-signal index of section guides. Jump to a section's llms.txt for concise intros, curated links, and fast navigation.
+
+${bullets}`;
+}
+
+function buildToolsSection() {
+ return `## Tools available for AI assistants
+
+These resources give AI assistants direct access to Base documentation and reusable workflows.
+
+### Base MCP server
+
+\`${CONFIG.mcpUrl}\`
+
+### Base skills
+
+AI agents can use Base skills to perform onchain actions directly from their tool loop — no custom integration required. Available skills include:
+
+[${CONFIG.skillsRepoUrl}](${CONFIG.skillsRepoUrl})
+
+Install Base skills for your AI assistant:
+
+\`\`\`
+${CONFIG.skillsInstallCmd}
+\`\`\``;
+}
+
+function buildFeaturedSection(featured) {
+ if (featured.length === 0) return '';
+ const bullets = featured.map(f => `- [${f.title}](${f.url})`).join('\n');
+ return `## Recommended starting points
+
+Narrow context to a specific type of work:
+
+${bullets}`;
+}
+
+function generateAgentsMd() {
+ const ignored = loadMintIgnore(`${CONFIG.docsDir}/.mintignore`);
+ const index = scanDocs(CONFIG.docsDir, '', ignored);
+
+ const indexLines = Object.entries(index)
+ .sort(([a], [b]) => a.localeCompare(b))
+ .map(([dir, files]) => `|${dir}:${files.join(',')}`);
+
+ // Frontmatter description rules: ≤200 chars, action-oriented, complete
+ // sentences, no "you can"/"users can"/"this page explains", includes
+ // "with [tool]" scoping, no colons in value, plain text, no versions,
+ // avoid "teaching"/"enable"/"disable".
+ const description = 'Look up Base documentation with a compact directory-grouped index built for AI coding agents. Lists every markdown page by parent directory so agents find context before generating code.';
+ if (description.length > 200) {
+ throw new Error(`agents.md description exceeds 200 chars (${description.length})`);
+ }
+
+ const sections = discoverTopLevelSections();
+ const featured = discoverFeaturedPages();
+
+ const blocks = [
+ `# ${CONFIG.docsUrl}/llms.txt`,
+ buildEntryPointSection(sections),
+ buildToolsSection(),
+ buildFeaturedSection(featured),
+ `## Compact docs index\n\n[Docs]|root:./docs\n${indexLines.join('\n')}`,
+ ].filter(Boolean);
+
+ const content = `---
+title: Base Docs Index
+description: ${description}
+---
+${blocks.join('\n\n')}
+`;
+
+ fs.writeFileSync(CONFIG.outputFile, content);
+
+ const size = Buffer.byteLength(content, 'utf8');
+ console.log(`Generated: ${CONFIG.outputFile}`);
+ console.log(`Size: ${(size / 1024).toFixed(2)} KB`);
+ console.log(`Sections: ${sections.length}`);
+ console.log(`Featured pages: ${featured.length}`);
+ console.log(`Index entries: ${indexLines.length} directories`);
+ console.log('');
+ console.log(`A new ${CONFIG.outputFile} has been generated. Review changes with: git diff ${CONFIG.outputFile}`);
+}
+
+generateAgentsMd();
diff --git a/scripts/generate-agents-md.js b/scripts/generate-agents-md.js
deleted file mode 100755
index 3e5b9c9a2..000000000
--- a/scripts/generate-agents-md.js
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * Generates a minified AGENTS.md with compact docs index
- * Groups doc files by directory using pipe-delimited structure
- *
- * Usage: node scripts/generate-agents-md.js
- */
-
-const fs = require('fs');
-const path = require('path');
-
-const CONFIG = {
- docsDir: './docs',
- outputFile: './docs/agents.md',
-
- skipFiles: [
- 'README.md', 'CHANGELOG.md', 'LICENSE.md', '.DS_Store',
- 'docs.json', 'package-lock.json', 'llms.txt', 'llms-full.txt',
- 'iframe-theme.js', 'style.css', 'instructions.md', 'writing.md', 'CLAUDE.md'
- ],
-
- skipFilePatterns: [/-vid$/, /-video$/, /-sbs$/],
-
- skipDirs: [
- 'node_modules', '.git', 'dist', 'build', 'coverage',
- '.next', 'images', 'videos', 'logo', 'openapi', '.claude', 'snippets'
- ],
-
- extensions: ['.md', '.mdx']
-};
-
-function loadMintIgnore(mintignorePath) {
- const ignored = { dirs: new Set(), files: new Set(), bareFiles: new Set() };
-
- if (!fs.existsSync(mintignorePath)) return ignored;
-
- for (const line of fs.readFileSync(mintignorePath, 'utf8').split('\n')) {
- const trimmed = line.trim();
- if (!trimmed || trimmed.startsWith('#')) continue;
-
- if (trimmed.endsWith('/*')) {
- // /apps/core-concepts/* → skip all files in that dir
- ignored.dirs.add(trimmed.slice(1, -2));
- } else if (trimmed.startsWith('/')) {
- // /apps/growth/build-viral-apps → skip specific file
- ignored.files.add(trimmed.slice(1));
- } else {
- // writing.md → skip by bare filename anywhere
- ignored.bareFiles.add(trimmed.replace(/\.mdx?$/, ''));
- }
- }
-
- return ignored;
-}
-
-function scanDocs(dir, basePath = '', ignored = { dirs: new Set(), files: new Set(), bareFiles: new Set() }) {
- const index = {};
-
- if (!fs.existsSync(dir)) return index;
-
- const entries = fs.readdirSync(dir, { withFileTypes: true });
- const files = [];
-
- for (const entry of entries) {
- if (CONFIG.skipFiles.includes(entry.name) || entry.name.startsWith('.')) continue;
-
- const fullPath = path.join(dir, entry.name);
- const relPath = basePath ? `${basePath}/${entry.name}` : entry.name;
-
- if (entry.isDirectory()) {
- if (CONFIG.skipDirs.includes(entry.name)) continue;
- Object.assign(index, scanDocs(fullPath, relPath, ignored));
- } else {
- const ext = path.extname(entry.name).toLowerCase();
- if (CONFIG.extensions.includes(ext)) {
- const baseName = entry.name.replace(/\.mdx?$/, '');
- if (CONFIG.skipFilePatterns.some(p => p.test(baseName))) continue;
- if (ignored.bareFiles.has(baseName)) continue;
- if (ignored.files.has(basePath ? `${basePath}/${baseName}` : baseName)) continue;
- files.push(baseName);
- }
- }
- }
-
- if (files.length > 0 && !ignored.dirs.has(basePath)) {
- const key = (basePath || 'root').replace(/\/?\d+-/g, '/').replace(/^\//, '');
- index[key] = files;
- }
-
- return index;
-}
-
-function generateMinified() {
- const ignored = loadMintIgnore(`${CONFIG.docsDir}/.mintignore`);
- const index = scanDocs(CONFIG.docsDir, '', ignored);
-
- // Build minified index lines
- const lines = Object.entries(index)
- .sort(([a], [b]) => a.localeCompare(b))
- .map(([dir, files]) => `|${dir}:${files.join(',')}`);
-
- // Minified output - minimal whitespace
- const content = `# Base Docs Index
-IMPORTANT: Prefer retrieval-led reasoning. Read relevant docs before generating code.
-Base is an Ethereum L2 by Coinbase. Docs for: Base Chain, Smart Wallet, OnchainKit, MiniKit.
-[Docs]|root:./docs
-${lines.join('\n')}
-`;
-
- fs.writeFileSync(CONFIG.outputFile, content);
-
- const size = Buffer.byteLength(content, 'utf8');
- console.log(`Generated: ${CONFIG.outputFile}`);
- console.log(`Size: ${(size / 1024).toFixed(2)} KB`);
- console.log(`Entries: ${lines.length} directories`);
-}
-
-generateMinified();