From a2bb6c71621f03e3589a97054a061da699ca97d8 Mon Sep 17 00:00:00 2001
From: Daniel O'Connor
Date: Mon, 27 Apr 2026 17:19:31 +0930
Subject: [PATCH] Ban Semrush

---
 public/robots.txt | 26 ++++++++++++++++++++------
 1 file changed, 20 insertions(+), 6 deletions(-)

diff --git a/public/robots.txt b/public/robots.txt
index 61e736fd1..eb0c67cc8 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -123,13 +123,27 @@ Disallow: /
 User-agent: WebReaper
 Disallow: /
 
-# Per their statement, semrushbot respects crawl-delay directives
-# We want them to overall stay within reasonable request rates to
-# the backend (20 rps); keeping in mind that the crawl-delay will
-# be applied by site and not globally by the bot, 5 seconds seem
-# like a reasonable approximation
+# Semrush seems to crawl everything.
 User-agent: SemrushBot
-Crawl-delay: 5
+Disallow: /
+
+User-agent: SiteAuditBot
+Disallow: /
+
+User-agent: SemrushBot-BA
+Disallow: /
+
+User-agent: SemrushBot-SI
+Disallow: /
+
+User-agent: SemrushBot-SWA
+Disallow: /
+
+User-agent: SplitSignalBot
+Disallow: /
+
+User-agent: SemrushBot-OCOB
+Disallow: /
 
 #
 # Friendly, low-speed bots are welcome viewing pages, but not