#
# robots.txt
#
# First, this is the robots.txt from http://www.wikipedia.org/ for "other" robots.
# Thanks Wikipedians!
#
# Please note: There are a lot of pages on this site, and there are
# some misbehaved spiders out there that go _way_ too fast. If you're
# irresponsible, your access to the site may be blocked.
#

User-agent: sistrix
Disallow: /

User-agent: Orthogaffe
Disallow: /

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Sorry, wget in its recursive mode is a frequent problem.
# Please read the man page and use it properly; there is a
# --wait option you can use to set the delay between hits,
# for instance (see the example at the end of this file).
#
User-agent: wget
Disallow: /

#
# The 'grub' distributed client has been *very* poorly behaved.
#
User-agent: grub-client
Disallow: /

#
# Doesn't follow robots.txt anyway, but...
#
User-agent: k2spider
Disallow: /

#
# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# Bots that analyze the SEO of your site
User-agent: MJ12bot
Disallow: /

User-agent: spbot
Disallow: /

# Bots whose purpose is unknown
User-agent: sitebot
Disallow: /

# panscient
User-agent: panscient.com
Disallow: /

# istellabot (Tiscali)
User-agent: istellabot
Disallow: /

# XoviBot (SMO)
User-agent: XoviBot
Disallow: /

# BLEXBot (SEO)
User-agent: BLEXBot
Disallow: /

# SMTBot (identifies technologies)
User-agent: SMTBot
Disallow: /

# Unknown bot, makes a lot of requests
User-agent: Seon
Disallow: /

# MegaIndex backlinks spider
User-agent: MegaIndex
Disallow: /

# Pigafetta-Bot (visual SEO)
User-agent: Pigafetta
Disallow: /

# MauiBot
User-agent: MauiBot
Disallow: /

# Allow the Google image bot to search all images
User-agent: Googlebot-Image
Disallow:

User-agent: Mediapartners-Google*
Disallow:

User-agent: *
#Disallow: /wp-admin/
#Disallow: /wp-includes/
Disallow: /trackback/
#Disallow: /wp-content/themes/
#Disallow: /wp-content/plugins/
Disallow: /foro/search.php
Disallow: /foro/memberlist.php
Disallow: /foro/profile.php
#Disallow: /aviso-legal
Disallow: /cookies
Disallow: /politicadeprivacidad
Disallow: /search/
Disallow: /directorio/
Disallow: /ca/
Disallow: /en/
Disallow: /pt/
Disallow: /m/
Disallow: /mobile/
Disallow: /plugins/feedback.php
Disallow: /xmlrpc.php

# Moz Site Explorer
User-agent: dotbot
Disallow: /

User-agent: DomainCrawler
Disallow: /

User-agent: Screaming Frog SEO Spider
Disallow: /

User-agent: python-requests
Disallow: /

User-agent: VelenPublicWebCrawler
Disallow: /

User-agent: FemtosearchBot
Disallow: /

User-agent: serpstatbot
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: Barkrowler
Disallow: /

User-agent: Screaming
Disallow: /

User-agent: GarlikCrawler
Disallow: /

User-agent: DAUM
Disallow: /

User-agent: SemrushBot-SA
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: magpie-crawler
Disallow: /

User-agent: sentibot
Disallow: /

User-agent: SentiBot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: BacklinkCrawler
Disallow: /

User-agent: Linguee
Disallow: /

User-agent: PimEyes
Disallow: /
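
#
# Regarding the wget note above: a polite recursive fetch might look
# something like the line below. This is only an illustration; example.org
# is a placeholder, and the delay should match whatever the site asks for.
#
#   wget --wait=2 --random-wait --recursive https://example.org/
#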