# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
# Allow all major search engines and AI crawlers
User-agent: *
Allow: /
# Explicitly allow AI search crawlers
User-agent: OAI-SearchBot
Allow: /
User-agent: GPTBot
Allow: /
User-agent: Google-Extended
Allow: /
User-agent: ChatGPT-User
Allow: /
User-agent: ClaudeBot
Allow: /
User-agent: Claude-Web
Allow: /
User-agent: anthropic-ai
Allow: /
User-agent: PerplexityBot
Allow: /
# Sitemap location
Sitemap: https://scanly.site/sitemap.xml