# robots.txt for https://tradinggoose.ai
# Content Signals declare how automated systems may use content after access.
# Preferences here allow search indexing while disallowing AI training and AI input use.
# NOTE: per RFC 9309, a crawler obeys only its most specific matching group, so the
# path exclusions are repeated in every named group below — otherwise a group with
# only "Allow: /" would cancel the Disallow rules for that crawler.

# Allow all crawlers
User-agent: *
Content-Signal: ai-train=no, search=yes, ai-input=no
Allow: /
Disallow: /api/
Disallow: /workspace/
Disallow: /_next/
Disallow: /private/
Disallow: /*.json$

# Specific crawler rules
User-agent: Googlebot
Content-Signal: ai-train=no, search=yes, ai-input=no
Allow: /
Disallow: /api/
Disallow: /workspace/
Disallow: /_next/
Disallow: /private/
Disallow: /*.json$
# Crawl-delay is non-standard and ignored by Googlebot; kept for documentation only.
Crawl-delay: 0

User-agent: Bingbot
Content-Signal: ai-train=no, search=yes, ai-input=no
Allow: /
Disallow: /api/
Disallow: /workspace/
Disallow: /_next/
Disallow: /private/
Disallow: /*.json$
Crawl-delay: 1

# AI/LLM crawlers — access is allowed, but Content-Signal declares no AI training
# and no AI input use; the same path exclusions as the default group still apply.
User-agent: GPTBot
User-agent: ChatGPT-User
User-agent: OAI-SearchBot
User-agent: CCBot
User-agent: anthropic-ai
User-agent: Claude-Web
User-agent: ClaudeBot
User-agent: Claude-SearchBot
User-agent: PerplexityBot
User-agent: Perplexity-User
User-agent: Google-Extended
User-agent: GoogleOther
User-agent: Applebot-Extended
User-agent: Meta-ExternalAgent
User-agent: Bytespider
User-agent: cohere-ai
User-agent: DuckAssistBot
User-agent: Amazonbot
User-agent: YouBot
User-agent: MistralAI-User
Content-Signal: ai-train=no, search=yes, ai-input=no
Allow: /
Disallow: /api/
Disallow: /workspace/
Disallow: /_next/
Disallow: /private/
Disallow: /*.json$

# Sitemap location
Sitemap: https://tradinggoose.ai/sitemap.xml

# Host (Yandex-specific directive; takes a bare hostname, no scheme)
Host: tradinggoose.ai