# robots.txt for Drive u 7 Home
# This file controls search engine crawler access to the site
# Each site in the monorepo should have its own robots.txt

# Allow all major search engines
User-agent: *
Allow: /

# Crawl-delay for polite crawling (optional, helps with server load)
Crawl-delay: 1

# Disallow admin/private paths (if any)
# Disallow: /admin/
# Disallow: /api/

# Allow specific important paths explicitly
Allow: /games/
Allow: /search/
Allow: /*.css
Allow: /*.js

# Sitemap location (update with your actual domain)
Sitemap: https://driveu7home.io/sitemap.xml

# AI Search Engines - Perplexity
User-agent: PerplexityBot
Allow: /

# AI Search Engines - ChatGPT
User-agent: GPTBot
Allow: /

# AI Search Engines - Claude (Anthropic)
User-agent: Claude-Web
Allow: /

# AI Search Engines - Google Bard/Gemini
User-agent: Google-Extended
Allow: /

# AI Search Engines - Common Crawl (used by many AI models)
User-agent: CCBot
Allow: /

# Block bad bots and scrapers (optional)
# User-agent: AhrefsBot
# Disallow: /
#
# User-agent: SemrushBot
# Disallow: /