# robots.txt
#
# This file is to prevent the crawling and indexing of certain parts
# of your site by web crawlers and spiders run by sites like Yahoo!
# and Google. By telling these "robots" where not to go on your site,
# you save bandwidth and server resources.
#
# This file will be ignored unless it is at the root of your host:
# Used:    http://example.com/robots.txt
# Ignored: http://example.com/site/robots.txt
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/robotstxt.html

User-agent: *
# CSS, JS, Images
Allow: /core/*.css$
Allow: /core/*.css?
Allow: /core/*.js$
Allow: /core/*.js?
Allow: /core/*.gif
Allow: /core/*.jpg
Allow: /core/*.jpeg
Allow: /core/*.png
Allow: /core/*.svg
Allow: /profiles/*.css$
Allow: /profiles/*.css?
Allow: /profiles/*.js$
Allow: /profiles/*.js?
Allow: /profiles/*.gif
Allow: /profiles/*.jpg
Allow: /profiles/*.jpeg
Allow: /profiles/*.png
Allow: /profiles/*.svg
# Directories
Disallow: /core/
Disallow: /profiles/
# Files
Disallow: /INSTALL.txt
Disallow: /README.txt
Disallow: /README.md
Disallow: /modules/contrib/*/README.txt
Disallow: /modules/contrib/*/README.md
Disallow: /modules/custom/*/README.txt
Disallow: /modules/custom/*/README.md
Disallow: /*/README.txt
Disallow: /*/README.md
Disallow: /web.config
Disallow: /example.gitignore
# Paths (clean URLs)
Disallow: /admin/
Disallow: /comment/reply/
Disallow: /filter/tips
Disallow: /node/add/
Disallow: /search/
Disallow: /user/register
Disallow: /user/password
Disallow: /user/login
Disallow: /user/logout
Disallow: /user/
Disallow: /media/oembed
Disallow: /*/media/oembed
# Paths (no clean URLs)
Disallow: /index.php/admin/
Disallow: /index.php/comment/reply/
Disallow: /index.php/filter/tips
Disallow: /index.php/node/add/
Disallow: /index.php/search/
Disallow: /index.php/user/password
Disallow: /index.php/user/register
Disallow: /index.php/user/login
Disallow: /index.php/user/logout
Disallow: /index.php/user/
Disallow: /index.php/media/oembed
Disallow: /index.php/*/media/oembed
# Don't crawl search results pages. Includes keywords, professionals, insights, etc.
# Included here (despite the seemingly duplicate /search/ rule above) because some crawlers
# get hung up on the trailing slash in that rule and continue to allow /search?foo paths.
Disallow: /search
# Don't allow /professionals facets to be crawled.
Disallow: /professionals?f%5B0%5D=*
# Don't crawl vcards or print pages.
Disallow: /professionals/*/print
Disallow: /vcard/*