# Robots.txt for Stitcwiddl Custom Cosplay Costumes

# Allow all web crawlers to access the site
User-agent: *
Allow: /

# Sitemap location
Sitemap: https://stitcwiddl.center/sitemap.xml

# Crawl delay (optional - helps with server load for bots that honor it; Googlebot ignores this directive)
Crawl-delay: 1

# Disallow crawling of certain files and directories
Disallow: /admin/
Disallow: /private/
Disallow: /temp/
Disallow: /*.json$
Disallow: /*.log$
Disallow: /api/

# Allow crawling of CSS and JavaScript files for better indexing
Allow: /*.css$
Allow: /*.js$

# Allow crawling of image files
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.png$
Allow: /*.gif$
Allow: /*.svg$
Allow: /*.webp$

# Specific bot instructions

# Google bots (Googlebot does not support Crawl-delay, so the line below is a no-op for Google)
User-agent: Googlebot
Allow: /
Crawl-delay: 1

# Bing bot
User-agent: Bingbot
Allow: /
Crawl-delay: 1

# Yahoo bot
User-agent: Slurp
Allow: /
Crawl-delay: 2

# Facebook bot (for social sharing)
User-agent: facebookexternalhit
Allow: /

# Twitter bot (for social sharing)
User-agent: Twitterbot
Allow: /

# LinkedIn bot (for social sharing)
User-agent: LinkedInBot
Allow: /

# Pinterest bot
User-agent: Pinterest
Allow: /

# Block or throttle aggressive SEO crawlers that might overload the server
User-agent: MJ12bot
Disallow: /

User-agent: AhrefsBot
Crawl-delay: 10

User-agent: SemrushBot
Crawl-delay: 10

# Block spam bots and scrapers
User-agent: SiteBot
Disallow: /

User-agent: webmeup-crawler
Disallow: /

User-agent: seokicks-robot
Disallow: /

User-agent: XoviBot
Disallow: /

User-agent: spbot
Disallow: /

# Note: This robots.txt is configured for a business website.
# It allows legitimate search engines while protecting against
# aggressive crawlers and spam bots that could impact server performance.
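
The blocking and throttling rules above can be spot-checked offline with a minimal sketch using Python's standard urllib.robotparser module. This is illustrative only: the standard-library parser does simple prefix matching (no * or $ wildcards) and treats blank lines as group separators, so the sketch copies just the MJ12bot, AhrefsBot, and Sitemap lines inline and confirms those three behaviors rather than reproducing Google's wildcard handling.

# Sanity-check a few of the groups above with Python's standard-library parser.
# Caveat: urllib.robotparser approximates, not replicates, Google's parser.
from urllib.robotparser import RobotFileParser

# Trimmed inline copy of the groups being tested (taken from the file above).
ROBOTS_TXT = """\
User-agent: MJ12bot
Disallow: /

User-agent: AhrefsBot
Crawl-delay: 10

Sitemap: https://stitcwiddl.center/sitemap.xml
"""

parser = RobotFileParser()
parser.parse(ROBOTS_TXT.splitlines())

# MJ12bot is blocked from the whole site.
print(parser.can_fetch("MJ12bot", "https://stitcwiddl.center/"))  # False

# AhrefsBot is asked to wait 10 seconds between requests.
print(parser.crawl_delay("AhrefsBot"))  # 10

# The sitemap declared in the file (site_maps() requires Python 3.8+).
print(parser.site_maps())  # ['https://stitcwiddl.center/sitemap.xml']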