Add default robots.txt to block crawlers (#959)

Sascha Ißbrücker authored on 2025-01-26 10:58:58 +02:00, committed via GitHub
parent 085d67e9f4
commit e6ebca1436
2 changed files with 4 additions and 0 deletions

robots.txt (new file):

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
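For reference, User-agent: * matches every crawler and Disallow: / excludes the entire site, so this two-line file asks all well-behaved bots to skip the whole instance. A minimal check of that policy using Python's standard-library urllib.robotparser (the URLs are placeholders, not part of the commit):

    from urllib.robotparser import RobotFileParser

    # Feed the two-line policy from this commit into the stdlib parser.
    parser = RobotFileParser()
    parser.parse(["User-agent: *", "Disallow: /"])

    # Every path is disallowed for every user agent.
    print(parser.can_fetch("Googlebot", "https://example.com/bookmarks"))  # False
    print(parser.can_fetch("*", "https://example.com/"))                   # False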

uwsgi.ini:

@@ -4,6 +4,7 @@ env = DJANGO_SETTINGS_MODULE=siteroot.settings.prod
 static-map = /static=static
 static-map = /static=data/favicons
 static-map = /static=data/previews
+static-map = /robots.txt=static/robots.txt
 processes = 2
 threads = 2
 pidfile = /tmp/linkding.pid
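The new static-map line tells uwsgi to answer /robots.txt directly from the static directory, so the request never reaches the Django application; the existing /static mappings work the same way, with uwsgi trying each mapped root in order. A quick smoke test against a running instance, sketched in Python (the base URL is an assumption; linkding's Docker image listens on port 9090 by default):

    import urllib.request

    # Fetch the file uwsgi should now serve; adjust the address to your deployment.
    with urllib.request.urlopen("http://localhost:9090/robots.txt") as resp:
        body = resp.read().decode()

    assert "Disallow: /" in body, "robots.txt is not being served"
    print(body)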
@@ -18,6 +19,7 @@ if-env = LD_CONTEXT_PATH
 static-map = /%(_)static=static
 static-map = /%(_)static=data/favicons
 static-map = /%(_)static=data/previews
+static-map = /%(_)robots.txt=static/robots.txt
 endif =
 if-env = LD_REQUEST_TIMEOUT
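The if-env = LD_CONTEXT_PATH ... endif = block only applies when that environment variable is set, and uwsgi expands the %(_) placeholder to the value of the tested variable. So with LD_CONTEXT_PATH set to, say, linkding/ (the mapping relies on the trailing slash), the effective line becomes static-map = /linkding/robots.txt=static/robots.txt, which keeps robots.txt reachable when the app is hosted under a subpath.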