# robots.txt — Robots Exclusion Protocol (RFC 9309).
# Each directive must appear on its own line; the previous version of this
# file had every record collapsed onto a single line, which crawlers cannot
# parse. Structure restored below, one group per user agent.

# Lets see if this keeps hawk.doit.state.ct.us out
User-agent: Ultraseek
Disallow: /

# Disallow Microsoft URL Control bot
User-agent: Microsoft URL Control
Disallow: /

# Disallow heritrix/1.12.1
User-agent: heritrix/1.12.1
Disallow: /

User-agent: discobot
Disallow: /

# Disallow googlebot to index directory and file
User-Agent: Googlebot/2.1
Disallow: /liaisons/
Disallow: /webapps
Disallow: /help/askHomer
Disallow: /junk/

# Disallow Gigabot
User-Agent: Gigabot/2.0
Disallow: /liaisons/
Disallow: /webapps/
Disallow: /about/publications/journalprices.cfm
Disallow: /help/askHomer
Disallow: /junk/

# Disallow LinkLint any access directory and file
User-agent: LinkLint
Disallow: /webapps/
Disallow: /liaisons/
Disallow: /junk/

# Disallow htdig to index this directory and file
User-Agent: htdig
Disallow: /webapps/
Disallow: /liaisons/
Disallow: /junk/

# Disallow Yahoo! Slurp to index directory and file
User-Agent: Slurp
Disallow: /webapps/
Disallow: /liaisons/
Disallow: /help/askHomer
Disallow: /junk/

# Allow Teoma only specific directories
User-agent: Teoma
Disallow: /webapps/
Disallow: /liaisons/
Disallow: /help/askHomer
Disallow: /junk/

# Default rules for all other robots.
# NOTE(review): the previous version declared three separate "User-agent: *"
# groups; many crawlers honor only the first matching group per agent, so the
# later groups' rules were effectively ignored. All wildcard rules are merged
# into this single group (union — nothing removed).
User-agent: *
Disallow: /secure/
Disallow: /help/askHomer/
Disallow: /junk/
Disallow: /webapps/
Disallow: /liaisons/
Disallow: /cgi-bin/
Disallow: /services/ill/vcpage.html
Disallow: /help/comments/