User-agent: *
Disallow: /Admin/
Disallow: /Activate.aspx
Disallow: /Activate.aspx$
Disallow: /Activate.aspx*
Disallow: /Error.aspx
Disallow: /Error.aspx$
Disallow: /Error.aspx*
Disallow: /Password.aspx
Disallow: /Password.aspx$
Disallow: /Password.aspx*
Disallow: /shoppingcart.aspx
Disallow: /shoppingcart.aspx$
Disallow: /shoppingcart.aspx*
Disallow: /shoppingcart
Disallow: /payment.aspx
Disallow: /payment.aspx$
Disallow: /payment.aspx*
Disallow: /payment
Disallow: /myaccount.aspx
Disallow: /myaccount.aspx$
Disallow: /myaccount.aspx*
Disallow: /myaccount
Disallow: /*.axd$
Disallow: /*.axd
Disallow: /*add-to-cart=*
Disallow: /klantenservice
Disallow: /search
Disallow: /pagina
Disallow: /return
Disallow: /password
Disallow: /resetpassword
#
# Disallow for WebResource.axd caching issues. Several instances below to cover all search engines.
#
# To specify matching the end of a URL, use $:
#   Disallow: /*.axd$
#
# However, WebResource.axd and ScriptResource.axd always include a query string parameter, so the
# URL does not end with .axd; thus, the correct robots.txt record for Google is:
#   Disallow: /*.axd
#
# Not all crawlers recognize the wildcard '*' syntax; to comply with the robots.txt draft RFC,
# the literal paths are listed as well. Note that records are case sensitive, and the error page
# shows the requests in lower case, so both cases are included below:
#
Disallow: /ScriptResource.axd
Disallow: /ScriptResource.axd$
Disallow: /ScriptResource.axd*
Disallow: /WebResource.axd
Disallow: /WebResource.axd$
Disallow: /WebResource.axd*
Disallow: /CombineScriptsHandler.axd
Disallow: /CombineScriptsHandler.axd$
Disallow: /CombineScriptsHandler.axd*
Disallow: /scriptresource.axd
Disallow: /webresource.axd
Disallow: /combinescriptshandler.axd
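
# Illustrative matches for the records above (hypothetical URLs, not actual site requests):
#   /WebResource.axd?d=Abc123   -> blocked by "/*.axd" (the '*' spans the handler name), but NOT
#                                  by "/*.axd$", since the URL does not end in ".axd"
#   /ScriptResource.axd         -> blocked by "/*.axd", "/*.axd$", and the literal
#                                  "/ScriptResource.axd" record
#   /scriptresource.axd?d=xyz   -> on crawlers without wildcard support, blocked only by the
#                                  lower-case literal "/scriptresource.axd" record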

User-agent: Googlebot
Allow: /
Disallow: /Admin/
Disallow: /Activate.aspx
Disallow: /Activate.aspx$
Disallow: /Activate.aspx*
Disallow: /Error.aspx
Disallow: /Error.aspx$
Disallow: /Error.aspx*
Disallow: /Password.aspx
Disallow: /Password.aspx$
Disallow: /Password.aspx*
Disallow: /shoppingcart.aspx
Disallow: /shoppingcart.aspx$
Disallow: /shoppingcart.aspx*
Disallow: /shoppingcart
Disallow: /payment.aspx
Disallow: /payment.aspx$
Disallow: /payment.aspx*
Disallow: /payment
Disallow: /myaccount.aspx
Disallow: /myaccount.aspx$
Disallow: /myaccount.aspx*
Disallow: /myaccount
Disallow: /*.axd$
Disallow: /*.axd
Disallow: /*add-to-cart=*
Disallow: /klantenservice
Disallow: /search
Disallow: /pagina
Disallow: /return
Disallow: /password
Disallow: /resetpassword
#
# Same WebResource.axd / ScriptResource.axd handling as in the first group; see the notes above.
#
Disallow: /ScriptResource.axd
Disallow: /ScriptResource.axd$
Disallow: /ScriptResource.axd*
Disallow: /WebResource.axd
Disallow: /WebResource.axd$
Disallow: /WebResource.axd*
Disallow: /CombineScriptsHandler.axd
Disallow: /CombineScriptsHandler.axd$
Disallow: /CombineScriptsHandler.axd*
Disallow: /scriptresource.axd
Disallow: /webresource.axd
Disallow: /combinescriptshandler.axd

User-agent: bingbot
Allow: /
Disallow: /Admin/
Disallow: /Activate.aspx
Disallow: /Activate.aspx$
Disallow: /Activate.aspx*
Disallow: /Error.aspx
Disallow: /Error.aspx$
Disallow: /Error.aspx*
Disallow: /Password.aspx
Disallow: /Password.aspx$
Disallow: /Password.aspx*
Disallow: /shoppingcart.aspx
Disallow: /shoppingcart.aspx$
Disallow: /shoppingcart.aspx*
Disallow: /shoppingcart
Disallow: /payment.aspx
Disallow: /payment.aspx$
Disallow: /payment.aspx*
Disallow: /payment
Disallow: /myaccount.aspx
Disallow: /myaccount.aspx$
Disallow: /myaccount.aspx*
Disallow: /myaccount
Disallow: /*.axd$
Disallow: /*.axd
Disallow: /*add-to-cart=*
Disallow: /klantenservice
Disallow: /search
Disallow: /pagina
Disallow: /return
Disallow: /password
Disallow: /resetpassword
#
# Same WebResource.axd / ScriptResource.axd handling as in the first group; see the notes above.
#
Disallow: /ScriptResource.axd
Disallow: /ScriptResource.axd$
Disallow: /ScriptResource.axd*
Disallow: /WebResource.axd
Disallow: /WebResource.axd$
Disallow: /WebResource.axd*
Disallow: /CombineScriptsHandler.axd
Disallow: /CombineScriptsHandler.axd$
Disallow: /CombineScriptsHandler.axd*
Disallow: /scriptresource.axd
Disallow: /webresource.axd
Disallow: /combinescriptshandler.axd

#-- Spam Bots & Other Unwanted Bots --#
User-agent: aiHitBot
User-agent: Barkrowler
User-agent: BDCbot
User-agent: BLEXBot
User-agent: BLP_bbot
User-agent: brokenlinkcheck.com
User-agent: Buck
User-agent: CCBot
User-agent: Cliqzbot
User-agent: cyencebot
User-agent: DomainCrawler
User-agent: Dow Jones Searchbot
User-agent: Exabot
User-agent: ExtLinksBot
User-agent: FemtosearchBot
User-agent: Fever
User-agent: GarlikCrawler
User-agent: Gigabot
User-agent: gobuster
User-agent: GrapeshotCrawler
User-agent: heritrix
User-agent: istellabot
User-agent: Jersey
User-agent: Jobkicks
User-agent: libwww-perl
User-agent: linkdexbot
User-agent: LinkpadBot
User-agent: ltx71 - (http://ltx71.com/)
User-agent: lua-resty-http
User-agent: LumtelBot
User-agent: magpie-crawler
User-agent: Magus bot
User-agent: Mail.RU_Bot
User-agent: Megaindex.ru
User-agent: Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0) LinkCheck by Siteimprove.com
User-agent: Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0) SiteCheck-sitecrawl by Siteimprove.com
User-agent: NL-Crawler
User-agent: OnPageBot
User-agent: Riddler
User-agent: ScoutJet
User-agent: Scrapy
User-agent: Seekport
User-agent: SeznamBot
User-agent: Siteimprove
User-agent: SMTBot
User-agent: UptimeRobot
User-agent: VelenPublicWebCrawler
User-agent: Wget
User-agent: yacybot
User-agent: Yeti
User-agent: YisouSpider
User-agent: YunSecurityBot
User-agent: ZoominfoBot
Disallow: /

#-- SEO Tools & Services - Set Crawl Delay for Optimal Performance --#
User-agent: AhrefsBot
User-agent: AhrefsSiteAudit
User-agent: Caliperbot
User-agent: DotBot
User-agent: HubSpot
User-agent: MJ12bot
User-agent: rogerbot
User-agent: SemrushBot
Crawl-Delay: 5

Sitemap: https://www.zepig.nl/sitemap.xml
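
# Consecutive User-agent lines form a single group: the rules that follow apply to every agent
# listed, so the one "Disallow: /" above blocks all of the unwanted bots, and "Crawl-Delay: 5"
# applies to each SEO crawler in its group. Crawl-Delay is a non-standard directive; crawlers
# that honor it (e.g. bingbot, AhrefsBot, SemrushBot) typically read the value as a minimum
# number of seconds between requests, while Googlebot ignores it.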