#
# robots.txt for epages
#
# applies to all robots
# disallow TBO, PBO
# prevent mass indexing of search pages
User-agent: *
Crawl-delay: 100
Request-rate: 1/100s          # max 1 request per 100 seconds
Disallow: /epages/Site.admin/
Disallow: /epages/*ViewAction=FacetedSearchProducts
Disallow: /epages/*ViewAction=RemoteSearchProducts
Disallow: /epages/*ViewAction=ViewRemoteSearchProducts
Disallow: /epages/*ViewAction=ViewDetailSearchProducts
Disallow: /epages/*ViewAction=MobileSF-ViewDetailRemoteSearchProducts
Disallow: /epages/*ViewAction=MobileSF-DetailRemoteSearchProducts
Disallow: /epages/*ViewAction=MobileSF-ViewDetailFacetedSearchProducts
Disallow: /epages/*ViewAction=MobileSF-DetailFacetedSearchProducts

# -- Full blocks for aggressive crawlers
User-agent: MJ12bot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: SemrushBot-SA
Disallow: /

User-agent: MauiBot
Disallow: /

User-agent: MauiBot (crawler.feedback+dc@gmail.com)
Disallow: /

User-agent: GeedoBot
Disallow: /

User-agent: PetalBot
Disallow: /

User-agent: GPTBot
Disallow: /

User-agent: YandexCalendar
Disallow: /

User-agent: YandexMobileBot
Disallow: /private/*.txt$

# -- Search-engine-specific overrides
User-agent: bingbot
Crawl-delay: 600              # wait at least 600 seconds between requests
Request-rate: 1/600s          # max 1 request per 600 seconds
Disallow: /

#User-agent: Googlebot
#Crawl-delay: 600
#Request-rate: 1/600s
#Disallow: /

User-agent: Amazonbot
Crawl-delay: 150
Request-rate: 1/150s
Disallow: /