#
# robots.txt for http://www.financialwebring.org/ and friends
#
# Re-using some ideas from http://www.wikipedia.org/robots.txt
#
# NOTE: field names (User-agent/Disallow) are case-insensitive per the
# Robots Exclusion Protocol (RFC 9309); they are normalized to one casing
# here for consistency. One directive per line is REQUIRED by the format.

User-agent: *
Disallow: /gummy-stuff/images/
Disallow: /gummy-stuff/Excel/
Disallow: /gummy-stuff/archive/
Disallow: /gummystuff/images/
Disallow: /gummystuff/Excel/
Disallow: /gummystuff/SS/
# 13-Feb-15, P_I, let .htaccess handle these
#Disallow: /forum/admin/
#Disallow: /forum/cache/
#Disallow: /forum/db/
#Disallow: /forum/images/
#Disallow: /forum/includes/
#Disallow: /forum/language/
#Disallow: /forum/templates/
#Disallow: /forum/faq.php
#Disallow: /forum/groupcp.php
#Disallow: /forum/login.php
#Disallow: /forum/memberlist.php
#Disallow: /forum/modcp.php
#Disallow: /forum/posting.php
#Disallow: /forum/privmsg.php
#Disallow: /forum/profile.php
#Disallow: /forum/search.php
#Disallow: /forum/viewonline.php

# 15-Apr-10, P_I
# A new troublemaker, too many agents
User-agent: BP Spider
Disallow: /

# 19-Nov-12, P_I
# Per ahrefs.com/robot, turn them off
User-agent: AhrefsBot
Disallow: /

# 27-Feb-15, P_I
# Don't see this one adding value, plus they run multiple instances
User-agent: SemrushBot
Disallow: /

User-agent: SemrushBot-SA
Disallow: /

# 10-Mar-15, P_I, couple more with multiple instances and no apparent value
User-agent: trendictionbot
Disallow: /

User-agent: Yandex
Disallow: /

# 3-Apr-20, P_I, www.sentibot.eu doesn't seem relevant to FWF and gives false positive to Google [Bot]
User-agent: sentibot
Disallow: /

# Sourced from http://www.wikipedia.org/
# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

#
# Sorry, wget in its recursive mode is a frequent problem.
# Please read the man page and use it properly; there is a
# --wait option you can use to set the delay between hits,
# for instance.
#
User-agent: wget
Disallow: /

#
# The 'grub' distributed client has been *very* poorly behaved.
#
User-agent: grub-client
Disallow: /

#
# Doesn't follow robots.txt anyway, but...
#
User-agent: k2spider
Disallow: /

#
# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /