# robots.txt - IJDST Journal SEO Configuration
# This file guides search engine crawlers on how to index the website

# Default rules for all crawlers
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /admin
Disallow: /2fa
Disallow: /api/
Disallow: /storage/
Disallow: /config/
Disallow: /bootstrap/
Disallow: /resources/
Disallow: /vendor/
Disallow: /*.json$
Disallow: /*.sql$

# Allow crawling of public assets
Allow: /public/
Allow: /assets/
Allow: /build/

# Google-specific rules
# NOTE(review): Googlebot ignores Crawl-delay per Google's documentation;
# the directive below is harmless but has no effect.
User-agent: Googlebot
Allow: /
Disallow: /admin/
Disallow: /2fa
Crawl-delay: 0

# Bing-specific rules
User-agent: Bingbot
Allow: /
Disallow: /admin/
Disallow: /2fa
Crawl-delay: 1

# Block bad bots
User-agent: MJ12bot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

# Sitemap location
Sitemap: https://dev.ijdst.com/sitemap.xml