# Robots.txt for MDV Consulting
# https://mdvconsulting.ma

# Allow all web crawlers to access all content
User-agent: *
Allow: /

# Sitemap location
Sitemap: https://mdvconsulting.ma/sitemap.xml

# Crawl delay for polite crawling (note: Googlebot ignores Crawl-delay;
# Bing and Yandex do honor it)
Crawl-delay: 1

# Disallow crawling of generated files that aren't meant for search engines.
# NOTE: CSS and JS are intentionally NOT blocked — Google needs to fetch
# them to render and index pages correctly, and blocking them harms SEO.
Disallow: /*.json$
Disallow: /*.map$

# Disallow crawling of development/build files
Disallow: /node_modules/
Disallow: /dist/
Disallow: /build/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /src/

# Allow crawling of important asset directories
Allow: /assets/
Allow: /public/
Allow: /favicon.ico

# Special directives for different search engines.
# NOTE (RFC 9309): a crawler that matches a specific User-agent group below
# follows ONLY that group and ignores the "User-agent: *" rules above, so
# none of the Disallow rules above apply to the bots named here.

# Google (Googlebot does not support Crawl-delay; its crawl rate is
# managed via Google Search Console instead)
User-agent: Googlebot
Allow: /

# Bing
User-agent: Bingbot
Allow: /
Crawl-delay: 1

# Yandex (popular in some regions)
User-agent: YandexBot
Allow: /
Crawl-delay: 2

# Baidu (for Chinese market reach)
User-agent: Baiduspider
Allow: /
Crawl-delay: 2

# Block aggressive SEO/backlink crawlers that add load without SEO benefit
User-agent: MJ12bot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

# Allow the Internet Archive's Wayback Machine crawler (ia_archiver)
User-agent: ia_archiver
Allow: /