# robots.txt

# Allow all common crawlers
User-agent: *
Allow: /

# Allow Googlebot and other Google crawlers
User-agent: Googlebot
Allow: /

User-agent: Googlebot-Image
Allow: /

User-agent: Googlebot-News
Allow: /

User-agent: Googlebot-Video
Allow: /

# Include common crawlers from googlebot.json
User-agent: AdobeBot
Allow: /

User-agent: Applebot
Allow: /

User-agent: Bingbot
Allow: /

# Special-case crawlers from special-crawlers.json
User-agent: AhrefsBot
Allow: /

User-agent: SemrushBot
Allow: /

# NOTE: the robots.txt token is the product name only ("OAI-SearchBot"),
# not the full User-Agent string with version and URL.
User-agent: OAI-SearchBot
Allow: /

# User-triggered fetchers from user-triggered-fetchers.json
User-agent: Google-Incremental
Allow: /

User-agent: Google-Site-Verification
Allow: /

# Additional user-triggered fetchers from user-triggered-fetchers-google.json
# Example entries (replace with actual bots from the JSON file)
User-agent: Google-User-Triggered-Fetcher-1
Allow: /

User-agent: Google-User-Triggered-Fetcher-2
Allow: /

# Additional bots from googlebot.json
# Example entries (replace with actual bots from the JSON file)
User-agent: Googlebot-Special-1
Allow: /

User-agent: Googlebot-Special-2
Allow: /

# Additional special-case crawlers from special-crawlers.json
# Example entries (replace with actual bots from the JSON file)
User-agent: Special-Crawler-1
Allow: /

User-agent: Special-Crawler-2
Allow: /

Sitemap: https://rbuchd.in/sitemap.xml