Files
Redmine/app/views/welcome/robots.text.erb
Go MAEDA 09db3e7131 robots.txt: disallow crawling dynamically generated PDF documents (#31617).
Patch by Go MAEDA.


git-svn-id: http://svn.redmine.org/redmine/trunk@19867 e93f8b46-1217-0410-a6f0-8f06a7374b81
2020-07-09 00:33:41 +00:00

18 lines
788 B
Plaintext

<%# robots.txt served at the site root: tells all crawlers to skip pages -%>
<%# that are dynamically generated and expensive or unbounded to crawl. -%>
User-agent: *
<%# One set of rules per project, emitted twice: once with the project -%>
<%# object and once with its numeric id (presumably to cover both URL -%>
<%# forms — verify against the routes). -%>
<% @projects.each do |prj| -%>
<% [prj, prj.id].each do |key| -%>
Disallow: <%= url_for(controller: 'repositories', action: :show, id: key) %>
Disallow: <%= url_for(project_issues_path(project_id: key)) %>
Disallow: <%= url_for(project_activity_path(id: key)) %>
<% end -%>
<% end -%>
<%# Cross-project views whose query strings produce unbounded URL spaces. -%>
Disallow: <%= url_for(issues_gantt_path) %>
Disallow: <%= url_for(issues_calendar_path) %>
Disallow: <%= url_for(activity_path) %>
Disallow: <%= url_for(search_path) %>
Disallow: <%= url_for(issues_path(sort: '')) %>
Disallow: <%= url_for(issues_path(query_id: '')) %>
Disallow: <%= url_for(issues_path) %>?*set_filter=
<%# Dynamically generated PDF exports of issues and projects (#31617). -%>
Disallow: <%= url_for(issues_path(trailing_slash: true)) %>*.pdf$
Disallow: <%= url_for(projects_path(trailing_slash: true)) %>*.pdf$