# InklySoft - robots.txt
# SEO-optimized crawling directives for all bots

# Default rules for all bots
User-agent: *
Allow: /
Allow: /privacy
Allow: /legal
Allow: /#
Allow: /*.css$
Allow: /*.js$
Allow: /*.svg$
Allow: /*.png$
Allow: /*.xml$
Allow: /*.jpg$
Allow: /*.webp$
Allow: /build/
Allow: /static/

# Disallow private/admin areas (none currently, but reserved)
Disallow: /admin/
Disallow: /.git/
Disallow: /.well-known/security.txt
Disallow: /*.json$

# Crawl-delay to be respectful to the server
Crawl-delay: 1

# Allow fragment/anchor navigation crawling
# Important for SPA sections (#pricing, #process, etc.)
Allow: /#pricing
Allow: /#process
Allow: /#work
Allow: /#about
Allow: /#faq
Allow: /#contact

# Specifically allow AI/LLM crawlers for improved discoverability
User-agent: GPTBot
Allow: /
Crawl-delay: 2

User-agent: CCBot
Allow: /
Crawl-delay: 2

User-agent: anthropic-ai
Allow: /
Crawl-delay: 2

User-agent: Claude-Web
Allow: /
Crawl-delay: 2

User-agent: Googlebot
Allow: /
Crawl-delay: 1

User-agent: Bingbot
Allow: /
Crawl-delay: 1

# Block bad/aggressive bots
User-agent: AhrefsBot
User-agent: SemrushBot
Disallow: /

# Sitemap reference for all crawlers
Sitemap: https://inklysoft.xyz/sitemap.xml