Mirror of https://github.com/discourse/discourse.git
Commit 3d7dbdedc0: This is mainly useful for subfolder sites, which need to expose their robots.txt contents to a parent site.
19 lines, 466 B, Plaintext
<%= @robots_info[:header] %>
<% if Discourse.base_uri.present? %>
# This robots.txt file is not used. Please append the content below in the robots.txt file located at the root
<% end %>
#
<% @robots_info[:agents].each do |agent| %>
User-agent: <%= agent[:name] %>
<%- if agent[:delay] -%>
Crawl-delay: <%= agent[:delay] %>
<%- end -%>
<% agent[:disallow].each do |path| %>
Disallow: <%= path %>
<% end %>


<% end %>

<%= server_plugin_outlet "robots_txt_index" %>
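For illustration, here is roughly what this template might render for a subfolder install. The base path /forum, the wildcard agent, the delay value, and the disallow paths below are placeholder assumptions, not Discourse's actual defaults:

(contents of @robots_info[:header])
# This robots.txt file is not used. Please append the content below in the robots.txt file located at the root
#
User-agent: *
Crawl-delay: 10
Disallow: /forum/admin/
Disallow: /forum/auth/

The "not used" notice is emitted only when Discourse.base_uri is present, which is why a subfolder site would copy this output into the robots.txt served at the parent site's root. Any content emitted by plugins registered on the robots_txt_index outlet would be appended after the agent blocks.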