# Robots.txt for CookTimePro - Cooking Times & Techniques Guide
# Generated automatically during build process
# Last updated: 2025-11-01

# Default rules for all search engines
User-agent: *
Allow: /

# Disallow admin, API, and build assets
Disallow: /admin/
Disallow: /api/
Disallow: /_astro/
Disallow: /dist/
Disallow: /.astro/
Disallow: /node_modules/

# Disallow search result pages to prevent duplicate content
Disallow: /search?*
Disallow: /*?q=*
Disallow: /*?search=*

# Disallow development and testing paths
Disallow: /dev/
Disallow: /test/
Disallow: /examples/

# Allow important SEO files explicitly
Allow: /sitemap-index.xml
Allow: /sitemap-0.xml
Allow: /sitemap*.xml
Allow: /robots.txt
Allow: /manifest.json
Allow: /favicon.ico

# Google-specific optimizations
# NOTE: a crawler that matches a named group ignores the "User-agent: *"
# group entirely (RFC 9309), so the Disallow rules are repeated here.
# Googlebot does not support Crawl-delay; the line is kept for documentation.
User-agent: Googlebot
Crawl-delay: 1
Disallow: /admin/
Disallow: /api/
Disallow: /_astro/
Disallow: /dist/
Disallow: /.astro/
Disallow: /node_modules/
Disallow: /search?*
Disallow: /*?q=*
Disallow: /*?search=*
Disallow: /dev/
Disallow: /test/
Disallow: /examples/
Allow: /

# Bing-specific optimizations (same rules as the default group)
User-agent: Bingbot
Crawl-delay: 1
Disallow: /admin/
Disallow: /api/
Disallow: /_astro/
Disallow: /dist/
Disallow: /.astro/
Disallow: /node_modules/
Disallow: /search?*
Disallow: /*?q=*
Disallow: /*?search=*
Disallow: /dev/
Disallow: /test/
Disallow: /examples/
Allow: /

# Yandex-specific rules (same rules as the default group)
User-agent: YandexBot
Crawl-delay: 2
Disallow: /admin/
Disallow: /api/
Disallow: /_astro/
Disallow: /dist/
Disallow: /.astro/
Disallow: /node_modules/
Disallow: /search?*
Disallow: /*?q=*
Disallow: /*?search=*
Disallow: /dev/
Disallow: /test/
Disallow: /examples/
Allow: /

# Block aggressive crawlers that might impact site performance
User-agent: AhrefsBot
Crawl-delay: 10

User-agent: SemrushBot
Crawl-delay: 10

User-agent: MJ12bot
Crawl-delay: 10

# Allow social media crawlers for sharing
User-agent: facebookexternalhit/1.1
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /

User-agent: WhatsApp
Allow: /

# Politeness hints for crawlers that honor them
# NOTE: robots.txt cannot enforce HTTPS; redirect to HTTPS at the server level.
# Request-rate and Visit-time are nonstandard directives that most major
# crawlers (including Googlebot and Bingbot) ignore.
User-agent: *
Request-rate: 1/1
Visit-time: 0100-2300

# Sitemap location (served over HTTPS)
Sitemap: https://cooktimepro.com/sitemap-index.xml

# Host directive (deprecated Yandex extension; scheme required for HTTPS sites)
Host: https://cooktimepro.com

# General crawl delay (in seconds); ignored by Googlebot
Crawl-delay: 1