# Example robots.txt
# Created by Brandon
# Learn more at http://kb.mediatemple.net
# (mt) Forums - http://kb.mediatemple.net/questions/824/
# (mt) System Status - http://status.mediatemple.net
# (mt) Statement of Support - http://mediatemple.net/support/statement/

# How do I check that my robots.txt file is working as expected?
# http://www.google.com/support/webmasters/bin/answer.py?answer=35237
# Here is a list of Robots: http://www.robotstxt.org/db.html

# Instructions
# Remove the "#" to uncomment any directive you wish to use, but leave the descriptive comment lines commented out.

# Grant Robots Access
#######################################################################################

# This example allows all robots to visit all files because the wildcard "*" specifies all robots:
#User-agent: *
#Disallow:

# To allow a single robot and exclude all others, leave its Disallow empty and
# then block the wildcard group. Note that Google's crawler identifies itself
# as "Googlebot", not "Google":
#User-agent: Googlebot
#Disallow:

#User-agent: *
#Disallow: /
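
# The "Allow" directive is not part of the original robots.txt standard, but
# major crawlers such as Googlebot honor it. This sketch (the paths are
# placeholders) blocks a directory while still permitting one file inside it:
#User-agent: *
#Allow: /private/public-page.html
#Disallow: /private/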

# Deny Robots Access
#######################################################################################

# This example keeps all robots out:
#User-agent: *
#Disallow: /
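
# Some crawlers, such as Bingbot and Yandex, also honor a nonstandard
# "Crawl-delay" directive that asks them to wait between requests; Googlebot
# ignores it. The delay value below is only an example:
#User-agent: *
#Crawl-delay: 10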

# The next example tells all crawlers not to enter four directories of a website:
#User-agent: *
#Disallow: /cgi-bin/
#Disallow: /images/
#Disallow: /tmp/
#Disallow: /private/

# Example that tells a specific crawler not to enter one specific directory:
#User-agent: BadBot
#Disallow: /private/
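
# Path wildcards ("*") and the end-of-URL anchor ("$") are likewise extensions
# rather than part of the original standard, though Googlebot and Bingbot
# support them. This sketch blocks all PDF files (the pattern is illustrative):
#User-agent: *
#Disallow: /*.pdf$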

# Example that tells all crawlers not to enter one specific file, foo.html:
#User-agent: *
#Disallow: /domains/mt-example.net/foo.html
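
# Declare a Sitemap
#######################################################################################

# The "Sitemap" directive is recognized by the major search engines and may
# appear anywhere in the file. The URL below is a placeholder:
#Sitemap: http://mt-example.net/sitemap.xml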