# MetaDefender Cloud Commercial - Robots.txt
# This file provides guidance to search engine crawlers

# Allow all crawlers by default
User-agent: *

# Allow crawling of public pages
Allow: /
Allow: /dashboard/
Allow: /results/
Allow: /reports/
Allow: /licensing/

# Disallow sensitive and private areas
Disallow: /api/
Disallow: /admin/
Disallow: /auth/
Disallow: /_gatsby/
Disallow: /static/admin/
Disallow: /private/
Disallow: /internal/
Disallow: /login
Disallow: /logout
Disallow: /certs/
Disallow: /docker/
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.cache/
Disallow: /public/
Disallow: /src/
Disallow: /coverage/
Disallow: /tests/
Disallow: /__tests__/

# Disallow file types that shouldn't be indexed
Disallow: /*.json$
Disallow: /*.xml$
Disallow: /*.txt$
Disallow: /*.log$
Disallow: /*.env$
Disallow: /*.config$
Disallow: /*.lock$

# Crawl delay (optional - helps prevent server overload)
Crawl-delay: 1

# Specific crawler rules
User-agent: Googlebot
Allow: /
Disallow: /api/
Disallow: /admin/
Disallow: /auth/

User-agent: Bingbot
Allow: /
Disallow: /api/
Disallow: /admin/
Disallow: /auth/

User-agent: Slurp
Allow: /
Disallow: /api/
Disallow: /admin/
Disallow: /auth/

# Block aggressive crawlers
User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: DotBot
Disallow: /

User-agent: BLEXBot
Disallow: /

# Sitemap location
Sitemap: https://commercial.metadefender.local/sitemap.xml
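
# Notes on crawler support (comments only, not directives):
# - Crawl-delay is ignored by Googlebot; crawlers that honor it, such as
#   Bingbot, generally interpret the value as a delay in seconds between
#   requests.
# - The wildcard (*) and end-of-URL ($) patterns above are understood by
#   major crawlers such as Googlebot and Bingbot, but simpler parsers may
#   ignore them.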