# robots.txt
# Robots Exclusion Protocol rules (RFC 9309) for https://www.gn-app.com
# NOTE(fix): directives must each be on their own line — the previous file had
# them run together, which robots.txt parsers cannot read. Also moved the
# Sitemap directive out of the User-agent group: Sitemap is group-independent,
# and placing it mid-group can make strict parsers end the group early,
# detaching the Allow/Disallow rules that follow it.

# Rules for all crawlers
User-agent: *
# Allow the entire site by default
Allow: /

# Allow the sitemap and robots endpoints to be crawled
# Note: /robots.txt is rewritten to /api/robots internally, but this is
# transparent to crawlers
Allow: /api/sitemap
Allow: /api/robots

# Disallow other API endpoints to reduce unnecessary crawl load
# (major crawlers apply longest-match precedence, so the more specific
# Allow rules above still win for /api/sitemap and /api/robots)
Disallow: /api/

# Disallow crawling of internal Next.js files
Disallow: /_next/
Disallow: /__/auth/

# Disallow crawling of service worker files
# (the * wildcard is a Google/Bing extension, not part of RFC 9309)
Disallow: /sw.js
Disallow: /firebase-messaging-sw.js
Disallow: /workbox-*.js

# Sitemap location - helps search engines find all pages efficiently
# This includes hreflang information for international SEO
Sitemap: https://www.gn-app.com/api/sitemap