From e6ebca143693791585c70a535d599ef58030c1b6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sascha=20I=C3=9Fbr=C3=BCcker?=
Date: Sun, 26 Jan 2025 10:58:58 +0200
Subject: [PATCH] Add default robots.txt to block crawlers (#959)

---
 bookmarks/static/robots.txt | 2 ++
 uwsgi.ini                   | 2 ++
 2 files changed, 4 insertions(+)
 create mode 100644 bookmarks/static/robots.txt

diff --git a/bookmarks/static/robots.txt b/bookmarks/static/robots.txt
new file mode 100644
index 0000000..1f53798
--- /dev/null
+++ b/bookmarks/static/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
diff --git a/uwsgi.ini b/uwsgi.ini
index e9f7b9b..6e2d752 100644
--- a/uwsgi.ini
+++ b/uwsgi.ini
@@ -4,6 +4,7 @@ env = DJANGO_SETTINGS_MODULE=siteroot.settings.prod
 static-map = /static=static
 static-map = /static=data/favicons
 static-map = /static=data/previews
+static-map = /robots.txt=static/robots.txt
 processes = 2
 threads = 2
 pidfile = /tmp/linkding.pid
@@ -18,6 +19,7 @@ if-env = LD_CONTEXT_PATH
 static-map = /%(_)static=static
 static-map = /%(_)static=data/favicons
 static-map = /%(_)static=data/previews
+static-map = /%(_)robots.txt=static/robots.txt
 endif =

 if-env = LD_REQUEST_TIMEOUT