# Source: ukaiautomation/robots.txt (84 lines, 1.4 KiB, plaintext)
# UK AI Automation - robots.txt
# https://ukaiautomation.co.uk
User-agent: *
Allow: /
# Block sensitive directories and files
Disallow: /includes/
Disallow: /assets/
Disallow: /admin/
Disallow: /logs/
Disallow: /vendor/
Disallow: /config/
Disallow: /database/
Disallow: /docker/
# Block configuration and handler files
Disallow: /*-handler.php
Disallow: /*.log$
Disallow: /*.inc$
Disallow: /*.sql$
Disallow: /*.sh$
Disallow: /*.bak$
# Block query string URLs to prevent duplicate content
Disallow: /*?*
# Allow important static assets for rendering
Allow: /assets/css/*.css
Allow: /assets/js/*.js
Allow: /assets/images/*.webp
Allow: /assets/images/*.png
Allow: /assets/images/*.jpg
Allow: /assets/images/*.svg
# Sitemaps
Sitemap: https://ukaiautomation.co.uk/sitemap.xml
# Crawl-delay for respectful crawling
# (NOTE: Crawl-delay is non-standard — Googlebot ignores it; Bing honors it)
Crawl-delay: 1
# Specific instructions for major search engines
# NOTE: per RFC 9309, a crawler follows only its most-specific matching
# group. These named groups REPLACE the "User-agent: *" rules entirely,
# so the Disallow rules above do NOT apply to Googlebot or Bingbot as
# written. If those paths must stay blocked for them too, duplicate the
# Disallow list inside each named group.
User-agent: Googlebot
Allow: /
Crawl-delay: 0
User-agent: Bingbot
Allow: /
Crawl-delay: 1
# AI crawlers - explicitly allowed for citation
# NOTE: each named group below also overrides the "User-agent: *" rules,
# so these bots are not bound by the Disallow list above as written.
User-agent: GPTBot
Allow: /
User-agent: ChatGPT-User
Allow: /
User-agent: ClaudeBot
Allow: /
User-agent: anthropic-ai
Allow: /
User-agent: PerplexityBot
Allow: /
User-agent: Google-Extended
Allow: /
User-agent: Applebot-Extended
Allow: /
User-agent: Bytespider
Allow: /
User-agent: CCBot
Allow: /
User-agent: FacebookBot
Allow: /
User-agent: Amazonbot
Allow: /