# Robots.txt for myblog

# Allow all crawlers by default
User-agent: *
Allow: /

# Specifically allow GitHub Copilot and related crawlers
User-agent: GitHubBot
Allow: /

User-agent: GitHub-Copilot
Allow: /

User-agent: copilot
Allow: /

User-agent: copilotbot
Allow: /

User-agent: github-copilot
Allow: /

User-agent: Microsoft-Copilot
Allow: /

User-agent: CopilotForBusiness
Allow: /

# Allow other AI training crawlers and agents
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: OpenAIBot
Allow: /

User-agent: CCBot
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: ClaudeBot
Allow: /

# Common search engines
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

# Sitemap
Sitemap: https://www.tritry.jp/sitemap.xml