# Basic robots.txt for most websites

# Allow all crawlers to access everything by default
User-agent: *
Allow: /

# Common directories to block (adjust based on your site structure)
Disallow: /admin/
Disallow: /private/
Disallow: /tmp/
Disallow: /cgi-bin/
Disallow: /.git/
Disallow: /wp-admin/
Disallow: /wp-includes/

# Block common unwanted files (wildcard rules must start with /;
# the * and $ wildcards are honored by Google and Bing, but not by all crawlers)
Disallow: /*.log$
Disallow: /*.sql$
Disallow: /*.gz$

# Specify sitemap location (replace with your actual sitemap URL)
Sitemap: https://decoland.cz/sitemap.xml

# Optional: Crawl delay (use sparingly; it can slow indexing)
# Crawl-delay: 1
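
Before deploying, it can help to sanity-check the prefix rules. Below is a minimal sketch using Python's standard-library `urllib.robotparser`; the test string reuses the directives and the decoland.cz URLs from the file above. Two caveats: this parser implements the original exclusion protocol with first-match semantics (not Google's longest-match rule), and it does not understand the `*`/`$` wildcards, so the `/*.log$`-style rules should be verified with a wildcard-aware tool such as Google Search Console's robots.txt tester instead.

```python
from urllib.robotparser import RobotFileParser

# A subset of the robots.txt above; wildcard rules are omitted because
# urllib.robotparser would treat "*" and "$" as literal characters.
ROBOTS_TXT = """\
User-agent: *
Disallow: /admin/
Disallow: /private/
Sitemap: https://decoland.cz/sitemap.xml
"""

rp = RobotFileParser()
rp.parse(ROBOTS_TXT.splitlines())

# Paths with no matching Disallow rule are allowed by default.
print(rp.can_fetch("*", "https://decoland.cz/"))               # True
# Prefix rules block everything under the listed directory.
print(rp.can_fetch("*", "https://decoland.cz/admin/settings")) # False
# Sitemap lines are exposed too (Python 3.8+).
print(rp.site_maps())  # ['https://decoland.cz/sitemap.xml']
```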