# All robots may crawl the domain, except the paths disallowed below
User-agent: *
Disallow: /events/category/live-event/list/

# Disallow the /wp-admin/ folder
# This is a standard practice to prevent bots from accessing the WordPress admin area
User-agent: *
Disallow: /wp-admin/
Disallow: /wp-login.php
Disallow: /wp-register.php

# Disallow access to the AJAX handler script in WordPress
User-agent: *
Disallow: /wp-admin/admin-ajax.php

# Disallow event crawling
User-agent: *
Disallow: /events/*

# Allow Googlebot to access the JavaScript and CSS files
User-agent: Googlebot
Allow: /wp-includes/js/
Allow: /wp-content/themes/
Allow: /wp-content/plugins/

# Prevent Googlebot from crawling certain directories and file types
User-agent: Googlebot
Disallow: /cgi-bin/
Disallow: /tmp/
Disallow: /junk/
Disallow: /archive/
Disallow: /test/
Disallow: /old/
Disallow: /backup/
Disallow: /*.pdf$
Disallow: /*.docx$
Disallow: /*.xls$

# Prevent Amazonbot from crawling the entire site (example directive)
User-agent: Amazonbot
Disallow: /

# Prevent any bot from accessing a specific directory
User-agent: *
Disallow: /private/

# Prevent any bot from accessing a specific file
# User-agent: *
# Disallow: /private/file-to-hide.html

# The Sitemap directive specifies the absolute URL of your sitemap file
Sitemap: https://elireview.com/sitemap.xml
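
To spot-check rules like these before relying on them, the minimal sketch below uses Python's standard-library urllib.robotparser. Note that robotparser follows the original robots.txt convention: it honours only the first group that matches a given user agent and does not understand Google-style wildcards, so the repeated User-agent: * groups and the /*.pdf$ rules above may evaluate differently than they would for Googlebot; Google's own robots.txt tester remains authoritative. The file is assumed to be served at https://elireview.com/robots.txt.

```python
from urllib.robotparser import RobotFileParser

# Assumed location of the robots.txt file shown above.
ROBOTS_URL = "https://elireview.com/robots.txt"

parser = RobotFileParser(ROBOTS_URL)
parser.read()  # fetch and parse the live file

# (user agent, URL) pairs to test against the parsed rules
checks = [
    ("*", "https://elireview.com/events/category/live-event/list/"),     # first group: disallowed
    ("Amazonbot", "https://elireview.com/"),                             # Amazonbot: blocked site-wide
    ("Googlebot", "https://elireview.com/wp-content/themes/style.css"),  # explicitly allowed
]

for agent, url in checks:
    verdict = "allowed" if parser.can_fetch(agent, url) else "blocked"
    print(f"{agent:10s} {url}: {verdict}")
```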