# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /

# Crawlers blocked from the entire site.
User-agent: 008
Disallow: /

User-agent: sistrix
Disallow: /

User-agent: Baiduspider
Disallow: /

User-agent: Yandex
Disallow: /

User-agent: Ezooms
Disallow: /

User-agent: Linguee
Disallow: /

User-agent: SeznamBot
Disallow: /

User-agent: businessdbbot
Disallow: /

User-agent: Spinn3r
Disallow: /

User-agent: psbot
Disallow: /

User-agent: R6_CommentReader
Disallow: /

User-agent: TurnitinBot
Disallow: /

User-agent: JikeSpider
Disallow: /

User-agent: oBot
Disallow: /

User-agent: PaperLiBot
Disallow: /

User-agent: Exabot
Disallow: /

User-agent: WBSearchBot
Disallow: /

User-agent: SEOkicks-Robot
Disallow: /

User-agent: discoverybot
Disallow: /

User-agent: Genieo
Disallow: /

User-agent: meanpathbot
Disallow: /

# Paths off-limits to all other crawlers.
User-agent: *
Disallow: /admin/autocompletions/agents_for_display
Disallow: /onboard_schools
Disallow: /showings/new
Disallow: /inquiries/new
Disallow: /email_friends/new
Disallow: /searches$
Disallow: /rental_searches$
Disallow: /commercial_searches$
Disallow: /searches/1*
Disallow: /searches/2*
Disallow: /searches/3*
Disallow: /searches/4*
Disallow: /searches/5*
Disallow: /searches/6*
Disallow: /searches/7*
Disallow: /searches/8*
Disallow: /searches/9*
Disallow: /auth/

User-agent: Amazonbot
Disallow: /
Crawl-delay: 10

# Crawl-rate limits for specific crawlers.
User-agent: ScoutJet
Crawl-delay: 5

User-agent: AppleNewsBot
Crawl-delay: 5

User-agent: AhrefsBot
Crawl-delay: 1

User-agent: MJ12bot
Crawl-delay: 20

User-agent: barkrowler
Crawl-delay: 5

Sitemap: https://www.bohemiarealtygroup.com/site_map.xml