# Robots.txt for https://usesparrow.com
# Generated: 2024

# Allow all crawlers by default
User-agent: *
Allow: /

# Disallow admin and internal paths
Disallow: /admin
Disallow: /api/
Disallow: /file/
Disallow: /.well-known/
Disallow: /local_assets/
Disallow: /brands-app/

# Block specific file types from indexing
# (the "*" wildcard and "$" end anchor are Google/Bing extensions,
# not part of the base robots.txt standard; other bots may ignore them)
Disallow: /*.json$
Disallow: /*.xml$
Disallow: /*.config$

# Allow specific important files
Allow: /sitemap.xml
Allow: /favicon.ico

# Default crawl delay for bots that honor it
Crawl-delay: 1

# Search engine specific rules
# NOTE: a crawler obeys ONLY the most specific matching User-agent group,
# so the Disallow rules must be repeated here — a group containing just
# "Allow: /" would give that bot access to the paths blocked for "*" above.

# Google (Googlebot ignores Crawl-delay; tune crawl rate in Search Console)
User-agent: Googlebot
Allow: /
Disallow: /admin
Disallow: /api/
Disallow: /file/
Disallow: /.well-known/
Disallow: /local_assets/
Disallow: /brands-app/
Disallow: /*.json$
Disallow: /*.xml$
Disallow: /*.config$
Allow: /sitemap.xml
Allow: /favicon.ico

# Bing
User-agent: Bingbot
Crawl-delay: 1
Allow: /
Disallow: /admin
Disallow: /api/
Disallow: /file/
Disallow: /.well-known/
Disallow: /local_assets/
Disallow: /brands-app/
Disallow: /*.json$
Disallow: /*.xml$
Disallow: /*.config$
Allow: /sitemap.xml
Allow: /favicon.ico

# Common AI/ML crawlers - blocked from the entire site
User-agent: GPTBot
Disallow: /

User-agent: ChatGPT-User
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: Claude-Web
Disallow: /

# Sitemap location (group-independent; applies to all crawlers)
Sitemap: https://usesparrow.com/sitemap.xml