Added a dev-specific robots.txt, which blocks all robots.

- Legacy-Id: 6164
This commit is contained in:
Henrik Levkowetz 2013-09-18 17:44:51 +00:00
parent 68cea34e64
commit 78a0fd7759
2 changed files with 3 additions and 1 deletion

View file

@@ -80,6 +80,6 @@ if settings.SERVER_MODE in ('development', 'test'):
urlpatterns += patterns('',
(r'^(?P<path>(?:images|css|js)/.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
(r'^(?P<path>secr/(img|css|js)/.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
(r'^(?P<path>robots\.txt)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
(r'^(?P<path>robots\.txt)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT+"dev/"}),
(r'^_test500/$', lambda x: None),
)

2
static/dev/robots.txt Normal file
View file

@@ -0,0 +1,2 @@
User-agent: *
Disallow: /