manual save (2026-01-15 15:54)
public/robots.txt | 36 lines | Normal file
@@ -0,0 +1,36 @@
# Robots.txt for TuringFlow
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /api/
Disallow: /private/

# Sitemap
Sitemap: https://turingflow.ai/sitemap.xml

# Crawl-delay for search engines
Crawl-delay: 10

# Host
Host: https://turingflow.ai/

# User-agent specific rules
User-agent: Googlebot
Allow: /
Disallow: /admin/
Crawl-delay: 5

User-agent: Bingbot
Allow: /
Disallow: /admin/
Crawl-delay: 5

User-agent: Baiduspider
Allow: /
Disallow: /admin/
Crawl-delay: 5

User-agent: YandexBot
Allow: /
Disallow: /admin/
Crawl-delay: 5
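For a quick sanity check of these rules, a minimal sketch using Python's standard-library robots.txt parser is below. The file path `public/robots.txt` matches this commit; the sample URLs and the expected results in comments are illustrative assumptions, not part of the change. Note that agents with their own group (e.g. Googlebot) use only that group, not the `User-agent: *` rules.

```python
# Sketch: verify which paths the rules in this commit allow or block.
from urllib.robotparser import RobotFileParser

parser = RobotFileParser()
with open("public/robots.txt") as fh:
    parser.parse(fh.read().splitlines())

# Generic crawlers fall under "User-agent: *": /admin/, /api/, /private/ are blocked.
print(parser.can_fetch("*", "https://turingflow.ai/"))          # True
print(parser.can_fetch("*", "https://turingflow.ai/api/jobs"))  # False (hypothetical URL)

# Googlebot matches its own group, so only /admin/ is disallowed for it.
print(parser.can_fetch("Googlebot", "https://turingflow.ai/api/jobs"))  # True
print(parser.can_fetch("Googlebot", "https://turingflow.ai/admin/"))    # False

# Crawl-delay from the matching group (available in Python 3.6+).
print(parser.crawl_delay("*"))          # 10
print(parser.crawl_delay("Googlebot"))  # 5
```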