# Initial attempt to slow crawlers.
# Not all bots will obey the Crawl-delay, so let's see.
User-agent: CCBot
Allow: /
Crawl-delay: 10

User-agent: ChatGPT-User
Allow: /
Crawl-delay: 10

User-agent: GPTBot
Allow: /
Crawl-delay: 10

User-agent: Google-Extended
Allow: /
Crawl-delay: 10

User-agent: Google-CloudVertexBot
Allow: /
Crawl-delay: 10

User-agent: Applebot-Extended
Allow: /
Crawl-delay: 10

User-agent: anthropic-ai
Allow: /
Crawl-delay: 10

User-agent: ClaudeBot
Allow: /
Crawl-delay: 10

User-agent: Omgilibot
Allow: /
Crawl-delay: 10

User-agent: Omgili
Allow: /
Crawl-delay: 10

User-agent: FacebookBot
Allow: /
Crawl-delay: 10

User-agent: Diffbot
Allow: /
Crawl-delay: 10

User-agent: DuckAssistBot
Allow: /
Crawl-delay: 10

User-agent: AI2Bot
Allow: /
Crawl-delay: 10

User-agent: Bytespider
Allow: /
Crawl-delay: 10

User-agent: Kangaroo Bot
Allow: /
Crawl-delay: 10

User-agent: PanguBot
Allow: /
Crawl-delay: 10

User-agent: ImagesiftBot
Allow: /
Crawl-delay: 10

User-agent: PerplexityBot
Allow: /
Crawl-delay: 10

User-agent: cohere-ai
Allow: /
Crawl-delay: 10

User-agent: cohere-training-data-crawler
Allow: /
Crawl-delay: 10

User-agent: Meta-ExternalAgent
Allow: /
Crawl-delay: 10

User-agent: Meta-ExternalFetcher
Allow: /
Crawl-delay: 10

User-agent: Timpibot
Allow: /
Crawl-delay: 10

User-agent: Webzio-Extended
Allow: /
Crawl-delay: 10

User-agent: YouBot
Allow: /
Crawl-delay: 10

# These rules from https://meta.wikimedia.org/robots.txt
# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Misbehaving: requests much too fast:
User-agent: fast
Disallow: /

# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# Per their statement, SemrushBot respects crawl-delay directives.
# We want them to stay within a reasonable overall request rate to the
# backend (20 rps); keeping in mind that the crawl-delay is applied per
# site rather than globally by the bot, 5 seconds seems like a reasonable
# approximation.
User-agent: SemrushBot
Crawl-delay: 5

# Friendly, low-speed bots are welcome viewing article pages, but not
# dynamically-generated pages, please.
User-agent: *
Allow: /w/api.php?action=mobileview&
Allow: /w/load.php?
Allow: /api/rest_v1/?doc
Allow: /w/rest.php/site/v1/sitemap
Disallow: /w/
Disallow: /api/
Disallow: /wiki/Special: