From b69acfa8d7f5385735f933a761239be6afd07384 Mon Sep 17 00:00:00 2001
From: oSumAtrIX <johan.melkonyan1@web.de>
Date: Fri, 20 Oct 2023 23:19:59 +0200
Subject: [PATCH] feat: Disallow all web crawlers (#111)

* feat: Disallow all web crawlers

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 api/__init__.py | 12 +++++++++++-
 api/robots.py   | 10 ++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)
 create mode 100644 api/robots.py

diff --git a/api/__init__.py b/api/__init__.py
index ac221571..af00dda7 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -9,7 +9,17 @@
 from api.donations import donations
 from api.announcements import announcements
 from api.login import login
+from api.robots import robots
 
 api = Blueprint.group(
-    login, ping, github, info, socials, donations, announcements, compat, url_prefix="/"
+    login,
+    ping,
+    github,
+    info,
+    socials,
+    donations,
+    announcements,
+    compat,
+    robots,
+    url_prefix="/",
 )
diff --git a/api/robots.py b/api/robots.py
new file mode 100644
index 00000000..6d3a4217
--- /dev/null
+++ b/api/robots.py
@@ -0,0 +1,10 @@
+from sanic import Blueprint
+from sanic.response import text
+
+
+robots: Blueprint = Blueprint("robots")
+
+
+@robots.get("/robots.txt")
+async def robots_txt(request):
+    return text("User-agent: *\nDisallow: /", content_type="text/plain")
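
For context, a minimal sketch (not part of the patch) of how the grouped blueprint, including the new robots blueprint, might be mounted on a Sanic app; the application name "example-api" and the top-level module layout are assumptions for illustration only.

from sanic import Sanic

from api import api  # the Blueprint.group defined in api/__init__.py above

# "example-api" is a hypothetical application name, not taken from the repository.
app = Sanic("example-api")
app.blueprint(api)

# Once the app is running, a GET request to /robots.txt is expected to return:
#   User-agent: *
#   Disallow: /
# which instructs all well-behaved crawlers not to index any path on the host.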