Repository: https://github.com/discourse/discourse.git
FEATURE: add Noindex to robots.txt for disallowed routes
This strips pages that should not exist out of search indexes. See: https://meta.discourse.org/t/pages-listed-in-the-robots-txt-are-crawled-and-indexed-by-google/100309/11?u=sam
Commit: d84256a876 (parent: 4234058358)
@@ -10,6 +10,7 @@ Crawl-delay: <%= agent[:delay] %>
 <%- end -%>
 <% agent[:disallow].each do |path| %>
 Disallow: <%= path %>
+Noindex: <%= path %>
 <% end %>
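
To see what this template change emits, here is a minimal standalone sketch using Ruby's stdlib ERB, outside of Rails. The agent hash shape and the /admin/ and /auth/ paths are illustrative, not Discourse's actual data.

require "erb"

# Standalone sketch of the template pattern above (not Discourse's
# actual view code). Each disallowed path now gets a matching
# Noindex directive alongside its Disallow directive.
template = <<~TEMPLATE
  User-agent: <%= agent[:name] %>
  <% agent[:disallow].each do |path| %>
  Disallow: <%= path %>
  Noindex: <%= path %>
  <% end %>
TEMPLATE

agent = { name: "*", disallow: ["/admin/", "/auth/"] }
puts ERB.new(template, trim_mode: "<>").result(binding)
# Output:
#   User-agent: *
#   Disallow: /admin/
#   Noindex: /admin/
#   Disallow: /auth/
#   Noindex: /auth/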
@@ -18,6 +18,7 @@ RSpec.describe RobotsTxtController do
     Discourse.stubs(:base_uri).returns('/forum')
     get '/robots.txt'

     expect(response.body).to include("\nDisallow: /forum/admin")
+    expect(response.body).to include("\nNoindex: /forum/admin")
   end
 end
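
The leading "\n" in both expectations is worth noting: it anchors the match to the start of a line in the response body, so a directive embedded inside some longer line cannot satisfy the test by accident. A small illustration (the body string here is made up, not the controller's real output):

body = "User-agent: *\nDisallow: /forum/admin\nNoindex: /forum/admin\n"
body.include?("\nNoindex: /forum/admin")   # => true
body.include?("\nNoindex: /forum/other")   # => false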