Allow crawling by default

Author: Chocobozzz
Date: 2018-05-22 11:02:10 +02:00
parent f954b5da2a
commit 78881bc4fa
2 changed files with 4 additions and 4 deletions


@@ -91,10 +91,10 @@ instance:
   customizations:
     javascript: '' # Directly your JavaScript code (without <script> tags). Will be eval at runtime
     css: '' # Directly your CSS code (without <style> tags). Will be injected at runtime
-  # Robot.txt rules. To allow robots to crawl your instance and allow indexation of your site, remove the '/'
+  # Robot.txt rules. To disallow robots to crawl your instance and disallow indexation of your site, add '/' to "Disallow:'
   robots: |
     User-agent: *
-    Disallow: /
+    Disallow: ''
 
 services:
   # Cards configuration to format video in Twitter
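
Note: after this change the default configuration no longer blocks crawlers. An administrator who wants the previous behaviour back can set the value to what the removed line contained; a minimal sketch using the same keys that appear in this hunk (the enclosing "instance:" section is taken from the hunk header, nothing else needs to change):

    instance:
      robots: |
        User-agent: *
        Disallow: /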


@@ -107,10 +107,10 @@ instance:
   customizations:
     javascript: '' # Directly your JavaScript code (without <script> tags). Will be eval at runtime
     css: '' # Directly your CSS code (without <style> tags). Will be injected at runtime
-  # Robot.txt rules. To allow robots to crawl your instance and allow indexation of your site, remove the '/'
+  # Robot.txt rules. To disallow robots to crawl your instance and disallow indexation of your site, add '/' to "Disallow:'
   robots: |
     User-agent: *
-    Disallow: /
+    Disallow: ''
 
 services:
   # Cards configuration to format video in Twitter
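
For context on the two values this commit swaps, standard robots.txt semantics are: "Disallow: /" tells compliant crawlers to stay away from every path, while an empty Disallow value (written as '' in the YAML) disallows nothing, so crawling and indexing are permitted. A minimal robots.txt for each case, purely as an illustration of those semantics rather than of PeerTube's exact output:

    # Old default: forbid crawling of every path
    User-agent: *
    Disallow: /

    # New default: nothing is disallowed, so the instance may be crawled and indexed
    User-agent: *
    Disallow: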