# /robots.txt file for crawlers
User-agent: *
# Disallow: /*.cgi           (Prevents robots from crawling any CGI files)
# Disallow: /*.pl            (Prevents robots from crawling any Perl scripts)
# Allow: /$                  (Allows robots to crawl the root page "/" only; anything not disallowed is crawlable by default)
# Disallow: /private_stuff/  (Blocks the /private_stuff/ directory, its subdirectories, and their contents)
# Disallow: /                (Prevents all robots from crawling anything on the site)
Disallow: /*.cgi
Disallow: /*.pl
Disallow: /tmp/
Allow: /$
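
The "*" and "$" patterns used above are wildcard extensions to the original robots exclusion standard (now part of RFC 9309): "*" matches any sequence of characters and a trailing "$" anchors the pattern to the end of the URL path. The Python sketch below is a simplified, illustrative matcher, not a full RFC 9309 implementation (it ignores longest-match precedence, for example), and the rule_to_regex and can_fetch helpers are hypothetical names introduced here, not a standard library API.

    import re

    def rule_to_regex(pattern):
        """Translate a robots.txt path pattern into a compiled regex.

        "*" matches any sequence of characters; a trailing "$" anchors
        the pattern to the end of the URL path.
        """
        anchored = pattern.endswith("$")
        if anchored:
            pattern = pattern[:-1]
        # Escape regex metacharacters, then restore "*" as a wildcard.
        body = re.escape(pattern).replace(r"\*", ".*")
        return re.compile(body + ("$" if anchored else ""))

    # Active rules from the file above.
    DISALLOW = [rule_to_regex(p) for p in ("/*.cgi", "/*.pl", "/tmp/")]
    ALLOW = [rule_to_regex(p) for p in ("/$",)]

    def can_fetch(path):
        """Simplified decision: an Allow match wins, otherwise any
        Disallow match blocks, and the default is to allow."""
        if any(rule.match(path) for rule in ALLOW):
            return True
        return not any(rule.match(path) for rule in DISALLOW)

    for path in ("/", "/index.html", "/cgi-bin/search.cgi", "/tmp/cache.txt"):
        print(path, "->", "allowed" if can_fetch(path) else "disallowed")

Running the sketch reports "/" and "/index.html" as allowed while "/cgi-bin/search.cgi" and "/tmp/cache.txt" are disallowed, which matches the intent of the file above.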