## Enable robots.txt rules for all crawlers
User-agent: *

## Do not crawl development/server technical folders or structural files and folders:
Disallow: /cgi-bin/
Disallow: /admin/
Disallow: /tinymce/
Disallow: /tinymce_jquery/
Disallow: /themes/
Disallow: /theme-color/
Disallow: /tinymce_responsivefilemanager/
Disallow: /includes/
Disallow: /config/
Disallow: /chat/
Disallow: /blackhole/*
Disallow: /key-check.php

## Flightdec sitemap for the site's pages:
## NOTE(review): URL uses http — confirm whether it should be https
Sitemap: http://e-learning.org.nz/sitemap.xml

## Do not crawl sorted/filtered pages — currently disabled pending review (is this a good idea?)
##Disallow: /*?filter*  
##Disallow: /*&filter*   ## events module
##Disallow: /*?filter_general*   ## magic modules

## ...or word-search result pages? (also disabled pending review)
## Disallow: /*?words=*     ##word search


## NOTE(review): this second "User-agent: *" group duplicates the /blackhole/
## rule already declared above; per RFC 9309 groups with the same user-agent
## are merged, so this is redundant (harmless, but could be consolidated).
User-agent: *
Disallow: /blackhole/*

## Crawl-delay: number of seconds a crawler should wait between successive
## requests to the same server. Set a custom crawl rate if the server is
## experiencing traffic problems.
## Note: Googlebot ignores Crawl-delay (crawl rate is managed via Search
## Console instead); other crawlers such as Bing and Yandex honour it.
Crawl-delay: 5


