# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# Default: allow all crawlers
User-agent: *
Allow: /

# GPTBot - Allow public content pages
User-agent: GPTBot
Allow: /themes/
Allow: /developers/
Allow: /glossary
Allow: /api-docs
Allow: /choose-a-theme
Allow: /about
Allow: /partners
Allow: /changelog
Allow: /charts
Allow: /collections
Allow: /llms.txt
Allow: /llms-full.txt
Allow: /sitemap
Disallow: /admin/
Disallow: /dashboard/
Disallow: /users/
Disallow: /checkouts/
Disallow: /pay/

# ClaudeBot - Allow public content pages
User-agent: ClaudeBot
Allow: /themes/
Allow: /developers/
Allow: /glossary
Allow: /api-docs
Allow: /choose-a-theme
Allow: /about
Allow: /partners
Allow: /changelog
Allow: /charts
Allow: /collections
Allow: /llms.txt
Allow: /llms-full.txt
Allow: /sitemap
Disallow: /admin/
Disallow: /dashboard/
Disallow: /users/
Disallow: /checkouts/
Disallow: /pay/

# Google-Extended - Allow public content pages (for AI training)
User-agent: Google-Extended
Allow: /themes/
Allow: /developers/
Allow: /glossary
Allow: /api-docs
Allow: /choose-a-theme
Allow: /about
Allow: /partners
Allow: /changelog
Allow: /charts
Allow: /collections
Allow: /llms.txt
Allow: /llms-full.txt
Allow: /sitemap
Disallow: /admin/
Disallow: /dashboard/
Disallow: /users/
Disallow: /checkouts/
Disallow: /pay/

# Block ad bots entirely
User-agent: AdsBot-Google
Disallow: /
Sitemap: https://shopinfo.app/sitemap.xml