# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# The rules below apply to all crawlers; specific abusive bots are blocked entirely further down.
User-agent: *
Disallow: /jobs/*/tracker
Disallow: /jobs/*/preview
Disallow: /jobs/*/applicants
Disallow: /jobs/*/manage
Disallow: /messages/*
Disallow: /applicants/new
Disallow: /backfills/latest_jobs
Disallow: /auth/*
Disallow: /clk/*
Disallow: /employers/*
# ZR integration blocks
Disallow: /c/*
Disallow: /s/*
Disallow: /e/*
Disallow: /g/*
Disallow: /n/*
Disallow: /Salaries/*
# block search query params
Disallow: /*?*lat=
Disallow: /*?*long=
Disallow: /*?*sort=
User-agent: YandexImages
Disallow: /
User-agent: Yandex
Disallow: /
User-agent: EasouSpider
Disallow: /
User-agent: SMTBot
Disallow: /
User-agent: Pcore-HTTP
Disallow: /
User-agent: BUbiNG
Disallow: /
User-agent: Companybook-Crawler
Disallow: /
User-agent: Wotbox/2.01
Disallow: /
User-agent: CCBot/2.0
Disallow: /
User-agent: CCBot
Disallow: /
User-agent: EbiBot
Disallow: /
User-agent: Pcore-HTTP/v0.24.5
Disallow: /
User-agent: TestiTest1
Disallow: /
User-agent: Vegi bot
Disallow: /
User-agent: istellabot/t.1
Disallow: /
User-agent: ltx71
Disallow: /
User-agent: ltx71 - (http://ltx71.com/)
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: MauiBot
Disallow: /
User-agent: VelenPublicWebCrawler
Disallow: /