From fe6462eba779262c14cae23b3bd80027178351ba Mon Sep 17 00:00:00 2001
From: oSumAtrIX
Date: Fri, 20 Oct 2023 20:51:06 +0200
Subject: [PATCH 1/2] feat: Disallow all web crawlers

---
 api/__init__.py | 3 ++-
 api/robots.py   | 9 +++++++++
 2 files changed, 11 insertions(+), 1 deletion(-)
 create mode 100644 api/robots.py

diff --git a/api/__init__.py b/api/__init__.py
index ac221571..de2b0258 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -9,7 +9,8 @@
 from api.donations import donations
 from api.announcements import announcements
 from api.login import login
+from api.robots import robots
 
 api = Blueprint.group(
-    login, ping, github, info, socials, donations, announcements, compat, url_prefix="/"
+    login, ping, github, info, socials, donations, announcements, compat, robots, url_prefix="/"
 )
diff --git a/api/robots.py b/api/robots.py
new file mode 100644
index 00000000..cc7cc237
--- /dev/null
+++ b/api/robots.py
@@ -0,0 +1,9 @@
+from sanic import Blueprint
+from sanic.response import text
+
+
+robots: Blueprint = Blueprint("robots")
+
+@robots.get("/robots.txt")
+async def robots_txt(request):
+    return text("User-agent: *\nDisallow: /", content_type='text/plain')

From 406547d3e72b84c1cfd135018fba399c5a5230a0 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 20 Oct 2023 18:53:38 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 api/__init__.py | 11 ++++++++++-
 api/robots.py   |  3 ++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/api/__init__.py b/api/__init__.py
index de2b0258..af00dda7 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -12,5 +12,14 @@
 from api.robots import robots
 
 api = Blueprint.group(
-    login, ping, github, info, socials, donations, announcements, compat, robots, url_prefix="/"
+    login,
+    ping,
+    github,
+    info,
+    socials,
+    donations,
+    announcements,
+    compat,
+    robots,
+    url_prefix="/",
 )
diff --git a/api/robots.py b/api/robots.py
index cc7cc237..6d3a4217 100644
--- a/api/robots.py
+++ b/api/robots.py
@@ -4,6 +4,7 @@
 
 robots: Blueprint = Blueprint("robots")
 
+
 @robots.get("/robots.txt")
 async def robots_txt(request):
-    return text("User-agent: *\nDisallow: /", content_type='text/plain')
+    return text("User-agent: *\nDisallow: /", content_type="text/plain")