# robots.txt for 智能体导航
# https://www.robotstxt.org/

User-agent: *
Allow: /
Disallow: /tmp/
Disallow: /application/
Disallow: /protected/

# 允许访问的目录
Allow: /public/
Allow: /?c=api&a=agentList
Allow: /?c=api&a=categories
Allow: /?c=api&a=platforms
Allow: /?c=api&a=agentDetail

# 禁止访问的API(避免重复抓取)
Disallow: /?c=api&a=hotAgents
Disallow: /?c=api&a=recommendAgents

# 网站地图
# NOTE(review): per the sitemaps.org protocol the Sitemap directive should be an
# absolute URL (e.g. https://example.com/sitemap.xml) — confirm the site's domain.
Sitemap: /sitemap.xml

# 抓取延迟(秒)
Crawl-delay: 1

# 针对搜索引擎的特殊规则
User-agent: Googlebot
Allow: /
Crawl-delay: 1

User-agent: Bingbot
Allow: /
Crawl-delay: 1

User-agent: Baiduspider
Allow: /
Crawl-delay: 1

User-agent: Sogou
Allow: /
Crawl-delay: 1