# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# The rules below block all crawlers from selected paths, and fully block a list of specific bots.
User-Agent: *
Disallow: /demo/
Disallow: /demo/*
Disallow: /blogposts/
Disallow: /blogposts/*
Disallow: /spritzerrallye/
Disallow: /spritzerrallye/*
Disallow: /preise/
Disallow: /preise/*
# NOTE(review): "noindex" is not part of the robots.txt standard and Google has
# ignored it since September 2019 — these three lines are likely ineffective;
# use a <meta name="robots" content="noindex"> tag or X-Robots-Tag header instead.
noindex: *d=tomorrow*
noindex: *d=today*
noindex: *d=weekend*
# noindex: *location=*
# noindex: *lat=*
# noindex: *lon=*
# noindex: *distance=*
Disallow: /users/
Disallow: /users/*
User-agent: Yandex
Disallow: /
User-agent: Baiduspider
Disallow: /
User-agent: FatBot
Disallow: /
User-agent: FatBot 2.0
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: proximic
Disallow: /
User-agent: sogou spider
Disallow: /
User-agent: sogou web spider
Disallow: /
User-agent: grapeshot
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: BLEXBot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: DotBot
Disallow: /
User-agent: PetalBot
Disallow: /
User-agent: CriteoBot/0.1
Disallow: /
User-agent: admantx-adform
Disallow: /
User-agent: ias-ir/3.1
Disallow: /
User-agent: ias-or/3.1
Disallow: /
User-agent: ias-va/3.1
Disallow: /