# suhrkamp: robots.txt
#
# This file is used to allow crawlers to index our site.
# It is NOT used for any other purpose, such as filtering
# impressions or clicks.

User-agent: *
Disallow: /_tasks/
Disallow: /_livebooks/
Disallow: /admin/
Disallow: /cftags/
Disallow: /inc/
Disallow: /warenkorb/
Disallow: /webservice/

User-agent: bingbot
Crawl-delay: 4

User-agent: Slurp
Crawl-delay: 2

User-agent: Yandex
Crawl-delay: 2

User-agent: AhrefsBot
Crawl-delay: 5