# robots.txt file for CourseHorse.com
# NOTE(review): this file was previously collapsed onto one physical line,
# which made the leading "#" comment swallow every directive (comments run
# to end-of-line per RFC 9309). Restored to the required one-directive-per-line
# form; directive tokens and paths are unchanged.

# Default rules for all crawlers: keep private, transactional, and
# checkout/cart pages out of search indexes.
User-agent: *
Disallow: /user/
Disallow: /team2/
Disallow: /course/checkout/
Disallow: /course/checkout/enter-info
Disallow: /course/checkout/enter-info?schedule=
Disallow: /cart
Disallow: /school-admin-wiki
Disallow: /gift-card?schedule=
Disallow: /gift-card?deal=
Disallow: /*?deal
Disallow: /course/index/recommendations
Disallow: /course/checkout/info-session
Disallow: /course/checkout/confirm
Disallow: /home
Disallow: /goal-pledge

# Special rules for Semrush, because they don't respect "User-agent: *"?!
User-agent: SemrushBot
Disallow: /user/
Crawl-delay: 10

User-agent: SemrushBot-SA
Disallow: /user/
Crawl-delay: 10

User-agent: SemrushBot-SI
Disallow: /user/
Crawl-delay: 10

# Block bots that aren't relevant
User-agent: Baiduspider
Disallow: /

User-agent: Yandex
Disallow: /

User-agent: MegaIndex
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: IstellaBot
Disallow: /

User-agent: ExaBot
Disallow: /

User-agent: DomainAppender
Disallow: /

User-agent: VegeBot
Disallow: /

User-agent: betaBot
Disallow: /

User-agent: SMTBot
Disallow: /

# NOTE(review): "CheckDogBt" looks like a possible typo for a bot token
# (e.g. "CheckDogBot") — confirm the exact User-agent string before changing.
User-agent: CheckDogBt
Disallow: /

# Slow down bots that are heavy-handed.
# Crawl-delay is non-standard (ignored by Googlebot) but honored by most
# of the crawlers listed below.
User-agent: Slurp
Crawl-delay: 10

User-agent: AhrefsBot
Crawl-delay: 10

User-agent: rogerBot
Crawl-delay: 10

User-agent: DotBot
Crawl-delay: 10

User-agent: NextGenSearchBot
Crawl-delay: 10

User-agent: bingbot
Crawl-delay: 10

User-agent: Amazonbot
Crawl-delay: 10