# ID: robots.txt 2006/01/17
# Date Created: 2008-07-11
# Date Modified: 2016-07-18/SB
#
# This is a file retrieved by webwalkers, a.k.a. spiders, that
# conform to a de facto standard.
# See
#
# Any URL matching one of these patterns will be ignored by search engine crawlers.
# Use the Disallow: statement to prevent crawlers from indexing specific directories.
#
# Format is:
#  User-agent: <name of spider>
#  Disallow: <nothing> | <path>
# -----------------------------------------------------------------------------

User-agent: *
Disallow:
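
# Illustrative example only (hypothetical paths, not part of this site's policy):
# a record that blocks all crawlers from a specific directory could look like:
#  User-agent: *
#  Disallow: /private/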