# Hexagrid IT Solutions - Robots.txt

User-agent: *
Allow: /

# Disallow specific paths that shouldn't be indexed
Disallow: /admin
Disallow: /api/
Disallow: /backend/
Disallow: /*.json$
Disallow: /src/
Disallow: /node_modules/
Disallow: /build/
Disallow: /dist/

# Allow important pages and assets
Allow: /images/
Allow: /assets/
Allow: /favicon.ico
Allow: /sitemap.xml

# Crawl delay (optional - reduces server load; honored by Bing, ignored by Googlebot)
Crawl-delay: 1

# Sitemap locations
Sitemap: https://hexagridit.com/sitemap.xml
Sitemap: https://hexagridit.com/sitemap-images.xml

# Google- and Bing-specific directives
# Note: a crawler obeys only its most specific matching group (RFC 9309),
# so the global Disallow rules are repeated here; a bare "Allow: /" group
# would silently exempt these bots from every rule above.
User-agent: Googlebot
User-agent: Bingbot
Allow: /
Disallow: /admin
Disallow: /api/
Disallow: /backend/
Disallow: /*.json$
Disallow: /src/
Disallow: /node_modules/
Disallow: /build/
Disallow: /dist/

# Block AI crawlers if desired (optional) - uncomment to opt out
# User-agent: GPTBot
# Disallow: /
# User-agent: ChatGPT-User
# Disallow: /
# User-agent: CCBot
# Disallow: /

# Social media crawlers (allowed everywhere so link previews render)
User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /
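
# Worked matching example (illustrative only; paths below are hypothetical
# and assume the longest-match and wildcard/"$" extensions that Googlebot
# and Bingbot document - plain RFC 9309 parsers may differ):
#   /api/users        -> blocked: "Disallow: /api/" outranks "Allow: /"
#   /config/data.json -> blocked: "Disallow: /*.json$" matches, and "$"
#                        anchors the pattern to the end of the path
#   /images/logo.png  -> allowed: "Allow: /images/" is the longest match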