diff --git a/public/robots.txt b/public/robots.txt index 61e736fd1..eb0c67cc8 100644 --- a/public/robots.txt +++ b/public/robots.txt @@ -123,13 +123,27 @@ Disallow: / User-agent: WebReaper Disallow: / -# Per their statement, semrushbot respects crawl-delay directives -# We want them to overall stay within reasonable request rates to -# the backend (20 rps); keeping in mind that the crawl-delay will -# be applied by site and not globally by the bot, 5 seconds seem -# like a reasonable approximation +# Semrush seems to crawl everything. User-agent: SemrushBot -Crawl-delay: 5 +Disallow: / + +User-agent: SiteAuditBot +Disallow: / + +User-agent: SemrushBot-BA +Disallow: / + +User-agent: SemrushBot-SI +Disallow: / + +User-agent: SemrushBot-SWA +Disallow: / + +User-agent: SplitSignalBot +Disallow: / + +User-agent: SemrushBot-OCOB +Disallow: / # # Friendly, low-speed bots are welcome viewing pages, but not