# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#
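<%# @allowed_user_agents is assumed to be set by RobotsTxtController; emit one User-agent line per allowed crawler %>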
<% @allowed_user_agents.each do |user_agent| %>
User-agent: <%= user_agent %>
<% end %>
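<%# Disallow each path in RobotsTxtController::DISALLOWED_PATHS, prefixed with the site's base URI %>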
<% RobotsTxtController::DISALLOWED_PATHS.each do |path| %>
Disallow: <%= Discourse.base_uri + path %>
<% end %>
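<%# Crawlers that are blocked outright each get their own group disallowing the whole site %>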
<% if @disallowed_user_agents %>
<% @disallowed_user_agents.each do |user_agent| %>
User-agent: <%= user_agent %>
Disallow: <%= Discourse.base_uri + "/" %>
<% end %>
<% end %>
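<%# Plugin outlet: presumably lets server-side plugins append their own robots.txt rules here %>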
<%= server_plugin_outlet "robots_txt_index" %>
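<%# One Crawl-delay group per agent/delay pair in @crawler_delayed_agents %>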
<% @crawler_delayed_agents.each do |agent, delay| %>
User-agent: <%= agent %>
Crawl-delay: <%= delay %>
<% end %>