# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /

# All other crawlers: wait 10 seconds between requests to the server, and
# stay out of the API and user areas. (Kept as a single record — crawlers
# match only one group per user-agent, so splitting these rules across two
# "User-agent: *" records would cause some of them to be ignored.)
User-agent: *
Crawl-delay: 10
Disallow: /api/
Disallow: /user/

# Bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: Ahrefs
Disallow: /

User-agent: Semrush
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: Yandex
Disallow: /

User-agent: YandexBot
Disallow: /

User-agent: Aspiegel
Disallow: /

User-agent: AspiegelBot
Disallow: /