# robots.txt for https://www.airsoft-base.nl/
#
# Policy summary:
#   - Meta / Facebook crawlers are blocked from the entire site.
#   - Major search and shopping bots (Google, Bing, DuckDuckGo) may crawl,
#     except internal search, the WP REST API, commerce/account pages, and
#     any URL carrying a query string.
#   - Every other crawler is disallowed everywhere.
#
# Format note: robots.txt is line-oriented — exactly one directive per line,
# groups separated by blank lines (RFC 9309).

# --- Meta / Facebook crawlers: fully blocked -------------------------------

User-agent: facebookexternalhit/1.1
Disallow: /

User-agent: meta-externalagent/1.1
Disallow: /

# NOTE(review): the original had an empty "Disallow:" here, which means
# "allow everything" — inconsistent with every sibling Meta/Facebook group.
# Changed to a full block to match; confirm this was the intent.
User-agent: Meta-ExternalAgent
Disallow: /

User-agent: facebookexternalhit
Disallow: /

# --- Search / shopping bots: allowed with exclusions -----------------------
# Shared exclusions: shop, internal search (/?s= and /search), WP REST API,
# cart/wishlist/checkout/account, and any URL with a query string (/*?*).

User-agent: Googlebot
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: AdsBot-Google
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: Googlebot-Image
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: Storebot-Google
Allow: /
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: Bingbot
Allow: /
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: BingPreview
Allow: /
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

User-agent: DuckDuckbot
Allow: /
Disallow: /shop
Disallow: /?s=
Disallow: /search
Disallow: /wp-json
Disallow: /cart
Disallow: /wishlist
Disallow: /checkout
Disallow: /my-account
Disallow: /*?*

# --- Everyone else: fully blocked ------------------------------------------
# The original file had a second "User-agent: *" group with only the
# wpo-plugins-tables-list.json rule; RFC 9309 treats same-agent groups as
# one, so it is folded in here (it is also already covered by "Disallow: /").

User-agent: *
Disallow: /
Disallow: /wp-content/uploads/wpo/wpo-plugins-tables-list.json

# Sitemap is a non-group directive; applies regardless of user-agent.
Sitemap: https://www.airsoft-base.nl/sitemap_index.xml