diff --git a/onlineweb4/urls.py b/onlineweb4/urls.py
index ecbd4fb70..16abb041c 100644
--- a/onlineweb4/urls.py
+++ b/onlineweb4/urls.py
@@ -67,6 +67,10 @@ def get_context_data(self, **kwargs):
     re_path(r"^wiki/", include("wiki.urls")),
 ]
 
+# Robots.txt
+urlpatterns += [
+    re_path(r"^robots.txt$", TemplateView.as_view(template_name="robots.txt", content_type="text/plain"))
+]
 
 
 # Onlineweb app urls
diff --git a/templates/robots.txt b/templates/robots.txt
new file mode 100644
index 000000000..8407bc61d
--- /dev/null
+++ b/templates/robots.txt
@@ -0,0 +1,49 @@
+# Dark Visitors robots.txt
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/anthropic-ai
+
+User-agent: anthropic-ai
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/bytespider
+
+User-agent: Bytespider
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/ccbot
+
+User-agent: CCBot
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/diffbot
+
+User-agent: Diffbot
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/facebookbot
+
+User-agent: FacebookBot
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/google-extended
+
+User-agent: Google-Extended
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/gptbot
+
+User-agent: GPTBot
+Disallow: /
+
+# AI Data Scraper
+# https://darkvisitors.com/agents/omgili
+
+User-agent: omgili
+Disallow: /
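
A minimal sketch of how the new route could be verified, assuming the project's standard Django test setup; the test class name and assertions are illustrative and not part of this PR.

from django.test import TestCase


class RobotsTxtViewTests(TestCase):
    # Sketch only: exercises the re_path added above, which serves
    # templates/robots.txt via TemplateView with a text/plain content type.
    def test_robots_txt_served_as_plain_text(self):
        response = self.client.get("/robots.txt")

        self.assertEqual(response.status_code, 200)
        # content_type is set explicitly in the URL config, so the default
        # text/html content type should not apply.
        self.assertTrue(response["Content-Type"].startswith("text/plain"))
        # One of the agents listed in templates/robots.txt.
        self.assertIn("User-agent: GPTBot", response.content.decode())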