# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#
<% @allowed_user_agents.each do |user_agent| %>
User-agent: <%= user_agent %>
<% end %>
<% RobotsTxtController::DISALLOWED_PATHS.each do |path| %>
Disallow: <%= Discourse.base_uri + path %>
<% end %>

<% if @disallowed_user_agents %>
<% @disallowed_user_agents.each do |user_agent| %>
User-agent: <%= user_agent %>
Disallow: <%= Discourse.base_uri + "/" %>
<% end %>
<% end %>

<%= server_plugin_outlet "robots_txt_index" %>

<% @crawler_delayed_agents.each do |agent, delay| %>
User-agent: <%= agent %>
Crawl-delay: <%= delay %>
<% end %>