# Robots.txt for ReactionTest.com
# This file tells search engine crawlers which pages they may and may not access.

# Sitemap location
Sitemap: https://reactiontest.com/sitemap.xml

# Default rules for all crawlers. Major search engines (Googlebot, Bingbot,
# Slurp, DuckDuckBot, Baiduspider, YandexBot) are covered by this group:
# giving each of them its own "Allow: /" group would make them skip the
# Disallow rules below, because a crawler obeys only the most specific
# User-agent group that matches it.
User-agent: *
# Block development and system files (if any are deployed by mistake).
# Disallow rules are listed before the blanket Allow so that first-match
# and longest-match parsers agree.
Disallow: /src/
Disallow: /.git/
Disallow: /node_modules/
Disallow: /*.json$
Disallow: /*.md$
# Allow CSS, JS, and image assets for proper rendering
Allow: /assets/
Allow: /*.css$
Allow: /*.js$
Allow: /*.png$
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.gif$
Allow: /*.svg$
Allow: /*.ico$
# Everything else is crawlable
Allow: /
# Crawl delay for respectful crawling; Googlebot ignores this directive,
# but some crawlers (e.g. Bingbot) honor it.
Crawl-delay: 1

# Block common bot traffic that doesn't add value
User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: DotBot
Disallow: /

# Host directive: a legacy Yandex hint for the preferred mirror. It was
# deprecated in 2018 and is ignored by most crawlers; canonical URLs and
# redirects are the reliable way to signal the preferred host.
Host: https://reactiontest.com
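To sanity-check the grouping above, here is a minimal sketch that parses this file with Python's standard-library urllib.robotparser (Python 3.8+ for site_maps()). The local filename "robots.txt", the sample bot name "SomeRandomBot", and the test URLs are illustrative assumptions. Note that the stdlib parser does not implement the "*" and "$" wildcard extensions, so rules such as "Disallow: /*.json$" are only interpreted by crawlers that support them; the checks below stick to plain prefix rules.

```python
# Minimal sketch: check how the rules above are interpreted, assuming the
# file is saved locally as "robots.txt" (hypothetical path) and Python 3.8+.
from urllib.robotparser import RobotFileParser

parser = RobotFileParser()
with open("robots.txt", encoding="utf-8") as f:
    parser.parse(f.read().splitlines())

# The sitemap declared at the top of the file.
print(parser.site_maps())  # ['https://reactiontest.com/sitemap.xml']

# Googlebot has no named group here, so it falls through to "User-agent: *":
# the site root is allowed, the /src/ prefix is blocked.
print(parser.can_fetch("Googlebot", "https://reactiontest.com/"))      # True
print(parser.can_fetch("Googlebot", "https://reactiontest.com/src/"))  # False

# AhrefsBot matches its own "Disallow: /" group and is blocked everywhere.
print(parser.can_fetch("AhrefsBot", "https://reactiontest.com/"))      # False

# Crawl-delay sits in the "*" group, so any unnamed crawler inherits it.
print(parser.crawl_delay("SomeRandomBot"))  # 1
```

Ordering the Disallow rules ahead of "Allow: /" matters for parsers like this one, which apply the first matching rule in a group; longest-match crawlers such as Googlebot reach the same answer either way.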