# Global
User-agent: *
Disallow: /xmlrpc.php

# All bots
User-agent: *
Disallow: /cgi-bin
Disallow: /wp-admin/
Disallow: /wp-includes/
Disallow: /wp-content/
Allow: /wp-content/uploads/
Disallow: /readme.html
Disallow: /license.txt
Disallow: /search/
Disallow: /?s=
Disallow: /*?*
Disallow: /*?
Disallow: /tag/
Disallow: */feed/
Disallow: /rss/
Disallow: /forums/
Disallow: /page/
Disallow: *?replytocom
Allow: /

# Googlebot
User-agent: Googlebot
Disallow: /comments/feed
Disallow: /feed/$
Disallow: /*/feed/$
Disallow: /*/feed/rss/$
Disallow: /*/*/feed/$
Disallow: /feed/
Disallow: /*.php$
Disallow: /*.js$
Disallow: /*.inc$
Disallow: /*.css$
Disallow: /*.wmv$
Disallow: /*.avi$
Disallow: /*.cgi$
Disallow: /*.txt$
Disallow: /?product-page*
Disallow: /comment-page*
Disallow: /?ak_action=reject_mobile
Allow: /ads.txt

# Dugg Mirror
User-agent: duggmirror
Disallow: /

# Google AdSense
User-agent: Mediapartners-Google
Disallow:

# Bots that obey the rules in robots.txt but generate abusive requests that slow the site down
User-agent: MSIECrawler
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: libwww
Disallow: /

# Add a crawl delay (in seconds) to each request from the bots that cause slowdowns
User-agent: noxtrumbot
Crawl-delay: 50

User-agent: msnbot
Crawl-delay: 30

User-agent: Slurp
Crawl-delay: 10

# Sitemap
# Sitemaps not allowed if strict definition selected.
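
# When sitemaps are allowed, a Sitemap directive would normally close out a
# file like this; a minimal sketch, assuming a hypothetical sitemap URL
# (replace example.com with the real host before uncommenting):
# Sitemap: https://www.example.com/sitemap.xml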