# /robots.txt as defined in
# http://en.wikipedia.org/wiki/Robots_exclusion_standard
# http://www.google.com/support/webmasters/bin/answer.py?hl=en&answer=156449
# Thanks, Wikipedia, for the suggestions!

User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

User-agent: grub-client
Disallow: /

User-agent: k2spider
Disallow: /

User-agent: NPBot
Disallow: /

User-agent: WebReaper
Disallow: /

# http://www.80legs.com/webcrawler.html
User-agent: 008
Disallow: /

User-agent: *
Crawl-delay: 3
Disallow: /*/cal_grid/
Disallow: /*/cal_list/
Disallow: /cgi-bin/
Disallow: /cgi-moses/
Disallow: /icons/
Disallow: /stats/
Disallow: /usage/