# https://www.robotstxt.org/robotstxt.html
# Default rules for all crawlers
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /*/admin/
Disallow: /api/
Disallow: /*/api/
Disallow: /offline/
Disallow: /*/offline/
Disallow: /_next/
Disallow: /static/
# Block archived/obsolete pages
Disallow: /fr/test/
Disallow: /en/test/
Disallow: /fr/about-new/
Disallow: /en/about-new/
Disallow: /fr/generic-landing/
Disallow: /en/generic-landing/
Disallow: /fr/app-makedrive/
Disallow: /en/app-makedrive/
# Block tracking parameters that create duplicate URLs.
# Pattern "/*?*token" matches the token anywhere in the query string,
# not only when it is the first parameter; the trailing "*" is dropped
# because robots.txt rules are prefix matches by default.
Disallow: /*?*utm_
Disallow: /*?*fbclid=
Disallow: /*?*gclid=
# Throttle aggressive SEO crawlers (Google ignores Crawl-delay, but these bots respect it).
# NOTE: per the Robots Exclusion Protocol, a crawler obeys ONLY the most specific
# matching group — a named group with no Disallow rules would let that bot crawl
# EVERYTHING, bypassing the "User-agent: *" rules above. Each group therefore
# repeats the sensitive-path blocks.
User-agent: AhrefsBot
Crawl-delay: 10
Disallow: /admin/
Disallow: /*/admin/
Disallow: /api/
Disallow: /*/api/
Disallow: /_next/
Disallow: /static/

User-agent: SemrushBot
Crawl-delay: 10
Disallow: /admin/
Disallow: /*/admin/
Disallow: /api/
Disallow: /*/api/
Disallow: /_next/
Disallow: /static/

User-agent: MJ12bot
Crawl-delay: 10
Disallow: /admin/
Disallow: /*/admin/
Disallow: /api/
Disallow: /*/api/
Disallow: /_next/
Disallow: /static/
# Sitemaps
Sitemap: https://tomorrow-solutions.com/sitemap.xml
Sitemap: https://tomorrow-solutions.com/fr/sitemap.xml
Sitemap: https://tomorrow-solutions.com/en/sitemap.xml