// Change summary:
// - Add ai.txt and llms.txt endpoints for AI/LLM discoverability
// - Enhance metadata across all pages (canonical URLs, OpenGraph, Twitter)
// - Add structured data (JSON-LD) to blog index, tag pages
// - Update robots.txt with AI crawler rules
// - Improve BlogPosting and CollectionPage schemas
import { MetadataRoute } from 'next';
export default function robots(): MetadataRoute.Robots {
|
|
const siteUrl = process.env.NEXT_PUBLIC_SITE_URL || 'http://localhost:3000';
|
|
|
|
return {
|
|
rules: [
|
|
{
|
|
userAgent: '*',
|
|
allow: '/',
|
|
disallow: ['/api/', '/_next/', '/admin/'],
|
|
},
|
|
{
|
|
userAgent: ['GPTBot', 'ChatGPT-User', 'Google-Extended', 'Anthropic-ai', 'ClaudeBot', 'Claude-Web', 'PerplexityBot', 'Cohere-ai'],
|
|
allow: '/',
|
|
disallow: ['/api/', '/_next/', '/admin/'],
|
|
},
|
|
],
|
|
sitemap: `${siteUrl}/sitemap.xml`,
|
|
host: siteUrl,
|
|
};
|
|
}