# robots.txt for Miraheze

# Restrict crawling of certain pages
User-agent: *
Allow: /w/api.php?action=mobileview&
Allow: /w/load.php?
Disallow: /w/
Disallow: /geoip$
Disallow: /rest_v1/
Disallow: /wiki/Special:
Disallow: /wiki/Spezial:
Disallow: /wiki/Spesial:
Disallow: /wiki/Special%3A
Disallow: /wiki/Spezial%3A
Disallow: /wiki/Spesial%3A
Disallow: /wiki/Property:
Disallow: /wiki/Property%3A
Disallow: /wiki/property:
Disallow: /wiki/Especial:
Disallow: /wiki/Especial%3A
Disallow: /wiki/especial:
# Pattern matching is not officially supported by the robots.txt spec,
# but some crawlers, such as Googlebot, support it
Disallow: /wiki/Special:*
Disallow: /wiki/Spezial:*
Disallow: /wiki/Spesial:*
Disallow: /wiki/Special%3A*
Disallow: /wiki/Spezial%3A*
Disallow: /wiki/Spesial%3A*
Disallow: /wiki/Property:*
Disallow: /wiki/Property%3A*
Disallow: /wiki/property:*
Disallow: /wiki/Especial:*
Disallow: /wiki/Especial%3A*
Disallow: /wiki/especial:*

# Block SemrushBot
User-agent: SemrushBot
Disallow: /

# Block AhrefsBot
User-agent: AhrefsBot
Disallow: /

# Block Bytespider
User-agent: Bytespider
Disallow: /

# Block PetalBot
User-agent: PetalBot
Disallow: /

# Block DotBot
User-agent: DotBot
Disallow: /

# Block MegaIndex
User-agent: MegaIndex
Disallow: /

# Block serpstatbot
User-agent: serpstatbot
Disallow: /

# Block Barkrowler
User-agent: Barkrowler
Disallow: /

# Block SeekportBot
User-agent: SeekportBot
Disallow: /

# Keep Crawl-delay rules at the bottom:
# bots that don't understand Crawl-delay might break when encountering it.
# See https://github.com/otwcode/otwarchive/pull/4411#discussion_r1044351129

# Throttle MJ12bot
User-agent: MJ12bot
Crawl-delay: 10

# Throttle YandexBot
# TODO: Yandex has not respected Crawl-delay since 2018
User-agent: YandexBot
Crawl-delay: 2.5

# Throttle Bingbot
User-agent: bingbot
Crawl-delay: 5

#
#----------------------------------------------------------#
#
# Dynamic sitemap URL
Sitemap: https://zenith.nsmbu.net/sitemap.xml
#
#----------------------------------------------------------#
#
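
# Note on the pattern matching used above: the "*" wildcard matches any
# character sequence and "$" anchors the end of the URL (as in /geoip$).
# Both are Googlebot-style extensions rather than part of the original
# robots.txt standard; crawlers without wildcard support will treat
# those lines as literal path prefixes instead.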