diff --git a/roles/MonashBioinformaticsPlatform_node_allocation/tasks/main.yml b/roles/MonashBioinformaticsPlatform_node_allocation/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..8a5bf2d56789282cc40e2bb5a47cd81f12b4a640
--- /dev/null
+++ b/roles/MonashBioinformaticsPlatform_node_allocation/tasks/main.yml
@@ -0,0 +1,5 @@
+---
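+# tmux lets users detach and reattach interactive sessions on allocation nodes.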
+- name: Install tmux
+  apt: name=tmux state=present
+  sudo: true
diff --git a/roles/enable_lmod/tasks/main.yml b/roles/enable_lmod/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..4676f706030c27b4b86e35a342e26fb3ae9ad74b
--- /dev/null
+++ b/roles/enable_lmod/tasks/main.yml
@@ -0,0 +1,34 @@
+---
+- include_vars: "{{ ansible_os_family }}.yml"
+
+- name: install lua
+  yum: name={{ item }} state=installed
+  with_items:
+    - lua
+    - lua-filesystem
+    - lua-posix
+  sudo: true
+  when: ansible_os_family == 'RedHat'
+
+- name: install lua
+  apt: name={{ item }} state=installed
+  with_items:
+    - lua5.2
+    - lua-filesystem
+    - lua-bitop
+    - lua-posix
+    - liblua5.2-0
+    - liblua5.2-dev
+    - tcl
+  sudo: true
+  when: ansible_os_family == 'Debian'
+
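+# Scripts in /etc/profile.d are sourced by login shells, so these links
+# give every user the "module" command at login.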
+- name: link bash
+  file: src={{ soft_dir }}/lmod/lmod/init/bash dest=/etc/profile.d/lmod.sh state=link
+  sudo: true
+
+- name: link csh
+  file: src={{ soft_dir }}/lmod/lmod/init/cshrc dest=/etc/profile.d/lmod.csh state=link
+  sudo: true
diff --git a/roles/enable_lmod/vars/Debian.yml b/roles/enable_lmod/vars/Debian.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f1a8d1448746c5492a52b1e29983120d6faf9831
--- /dev/null
+++ b/roles/enable_lmod/vars/Debian.yml
@@ -0,0 +1,2 @@
+---
+lua_include: /usr/include/lua5.2
diff --git a/roles/enable_lmod/vars/RedHat.yml b/roles/enable_lmod/vars/RedHat.yml
new file mode 100644
index 0000000000000000000000000000000000000000..533e08b0df15750498e38a23db460db834944a9b
--- /dev/null
+++ b/roles/enable_lmod/vars/RedHat.yml
@@ -0,0 +1,2 @@
+---
+lua_include: /usr/local
diff --git a/roles/enable_lmod/vars/main.yml b/roles/enable_lmod/vars/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ce2423328fad285e5b22b30a5257051c4475c96b
--- /dev/null
+++ b/roles/enable_lmod/vars/main.yml
@@ -0,0 +1,4 @@
+---
+source_dir: /tmp
+soft_dir: /usr/local
+lmod_version: 5.8.6
diff --git a/roles/jasons_ssh_ca/files/server_ca.pub b/roles/jasons_ssh_ca/files/server_ca.pub
new file mode 100644
index 0000000000000000000000000000000000000000..7e33e7cbf5a010dc6c9063e18d76017cddf09a39
--- /dev/null
+++ b/roles/jasons_ssh_ca/files/server_ca.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDOzj+PSIuuGMOkMK4UO01Y1d8+6jGYELM+msVDpeSCZCAYCqYzXay6QDl5IFpdlxlhWXzcsfbC8WcHy3z+jW6kP6BcqZd7+eYrZVcWeO9A+p67OcsOHw5ixhCjFlXKxX/3D4JeppQeIUswI33zw90QViAlOPsTQuvIIuNNKQhUvfTVvkljduFXNT3xjLWai+isYKWaCbfmaiQ7EQIQyX9a3RrBKcEsLbghk3UkSq/j1OlMTbIuBKfPu26slPNRQFVBjJJfkx+kFF9ArgywHDN5dX3CxGOJhC2KIBemOC5cXjUbUI15a1UReDqShhb0m4p9pTkFOexGOB17lh1/4nUuYt2xzRahNyAEz9i02eIaVkhYFjVn1OuKJ7pa44YwoGx8RmFjRp8W/i3Crbp/IqBzMCfOZmub98b0I7H9ryg+taACRga6sLqWTDrEAbj7zFmRaaOHDIvrFj5ITO4YKYwSaWKL8w19NX4VJqzO3VVHbmUxFBoK4tGDAQ39w6BfRdxdKb+FIe+MOz68k4ADKHJSf9+LCQOFEikKNkKVUNh7FjLwi5Wz7K4S5wjnrjTUiqNC5imst262UJjtTeg7wE7ngPOlpSi1Mh4pV3/tcAboiRF8ABS/P8P0chln1YbA73x45ZF/Is9XQ2XUJiUwutrcY+upRdu2p9JAeKxGrt8i7w== root@autht
diff --git a/roles/jasons_ssh_ca/handlers/main.yml b/roles/jasons_ssh_ca/handlers/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..875ea0a1df436812a61a5919059d0eb5b59e8884
--- /dev/null
+++ b/roles/jasons_ssh_ca/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: restart ssh debian
+  service: name=ssh state=restarted
+  sudo: true
+  when: ansible_os_family == "Debian"
diff --git a/roles/jasons_ssh_ca/tasks/main.yml b/roles/jasons_ssh_ca/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..4be9b14f7956c86fe584366c0df3dbd9d703fe97
--- /dev/null
+++ b/roles/jasons_ssh_ca/tasks/main.yml
@@ -0,0 +1,16 @@
+---
+- name: copy ca cert
+  copy: src=server_ca.pub dest=/etc/ssh/server_ca.pub owner=root group=root mode=644
+  sudo: true
+
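+# TrustedUserCAKeys tells sshd to accept any user certificate signed by
+# this CA, without needing per-user authorized_keys entries.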
+- name: edit sshd_config
+  lineinfile:
+  args: 
+    dest: /etc/ssh/sshd_config
+    line: TrustedUserCAKeys /etc/ssh/server_ca.pub
+    state: present
+  sudo: true
+  notify: restart ssh debian
+  when: ansible_os_family == "Debian"
diff --git a/roles/link_usr_local/tasks/main.yml b/roles/link_usr_local/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7f3e211f98ec1ec266cf0117b663e77f05e5c232
--- /dev/null
+++ b/roles/link_usr_local/tasks/main.yml
@@ -0,0 +1,14 @@
+---
+- name: stat usrlocal
+  stat: path={{ dest }}
+  register: stat_usrlocal
+
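+# If {{ dest }} is a real directory, move it aside so the symlink can take its place.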
+- name: mv
+  command: mv {{ dest }} {{ dest }}_old
+  when: stat_usrlocal.stat.isdir is defined and stat_usrlocal.stat.isdir
+  sudo: true
+
+- name: link
+  file: src="{{ src }}" dest="{{ dest }}" state=link
+  sudo: true
diff --git a/roles/lmod/tasks/main.yml b/roles/lmod/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7c63b8f05e12e703f84c1442e620cbfc0e38521b
--- /dev/null
+++ b/roles/lmod/tasks/main.yml
@@ -0,0 +1,45 @@
+---
+- include_vars: "{{ ansible_os_family }}.yml"
+
+- name: install lua
+  yum: name={{ item }} state=installed
+  with_items:
+    - lua
+    - lua-filesystem
+    - lua-posix
+  sudo: true
+  when: ansible_os_family == 'RedHat'
+
+- name: install lua
+  apt: name={{ item }} state=installed
+  with_items:
+    - lua5.2
+    - lua-filesystem
+    - lua-bitop
+    - lua-posix
+    - liblua5.2-0
+    - liblua5.2-dev
+    - tcl
+  sudo: true
+  when: ansible_os_family == 'Debian'
+
+- name: Download LMOD
+  get_url:
+    url=http://downloads.sourceforge.net/project/lmod/Lmod-{{ lmod_version }}.tar.bz2
+    dest={{ source_dir }}/Lmod-{{ lmod_version }}.tar.bz2
+    mode=0444
+
+- name: Uncompress LMOD
+  unarchive:
+    src={{ source_dir }}/Lmod-{{ lmod_version }}.tar.bz2
+    dest={{ source_dir }}
+    copy=no
+    creates={{ source_dir }}/Lmod-{{ lmod_version }}/README
+
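+# make install puts this version under {{ soft_dir }}/lmod/{{ lmod_version }}
+# and keeps a {{ soft_dir }}/lmod/lmod symlink pointing at the current version.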
+- name: Compile and install Lmod
+  shell: cd {{ source_dir }}/Lmod-{{ lmod_version }} && ./configure --prefix={{ soft_dir }} --with-mpathSearch=YES --with-caseIndependentSorting=YES && make install LUA_INCLUDE={{ lua_include }}
+  args:
+    creates: "{{ soft_dir }}/lmod/{{ lmod_version }}"
+  sudo: true
diff --git a/roles/lmod/vars/Debian.yml b/roles/lmod/vars/Debian.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f1a8d1448746c5492a52b1e29983120d6faf9831
--- /dev/null
+++ b/roles/lmod/vars/Debian.yml
@@ -0,0 +1,2 @@
+---
+lua_include: /usr/include/lua5.2
diff --git a/roles/lmod/vars/RedHat.yml b/roles/lmod/vars/RedHat.yml
new file mode 100644
index 0000000000000000000000000000000000000000..533e08b0df15750498e38a23db460db834944a9b
--- /dev/null
+++ b/roles/lmod/vars/RedHat.yml
@@ -0,0 +1,2 @@
+---
+lua_include: /usr/local
diff --git a/roles/lmod/vars/main.yml b/roles/lmod/vars/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ce2423328fad285e5b22b30a5257051c4475c96b
--- /dev/null
+++ b/roles/lmod/vars/main.yml
@@ -0,0 +1,4 @@
+---
+source_dir: /tmp
+soft_dir: /usr/local
+lmod_version: 5.8.6
diff --git a/roles/modulefiles/tasks/main.yml b/roles/modulefiles/tasks/main.yml
index 84ae26e4a27dc2ea7de433eb9c954099b4f77a75..7700d3b82cfd23a1e24f7eb73161bf89ed81458c 100644
--- a/roles/modulefiles/tasks/main.yml
+++ b/roles/modulefiles/tasks/main.yml
@@ -14,4 +14,14 @@
   args:
     dest: /usr/share/Modules/init/.modulespath
     line: /usr/local/Modules/modulefiles
+  ignore_errors: true
+  sudo: true
+
+# Ubuntu's environment-modules package uses lowercase /usr/share/modules
+- name: add /usr/local/Modules to the module file path
+  lineinfile: 
+  args:
+    dest: /usr/share/modules/init/.modulespath
+    line: /usr/local/Modules/modulefiles
+  ignore_errors: true
   sudo: true
diff --git a/roles/provision/tasks/main.yml b/roles/provision/tasks/main.yml
index 3b71e4d40f287e6fefd0be5763aa05e8ec406b1a..27a2cbbd47537625c2e13efa54ff0132b0ab8c4a 100644
--- a/roles/provision/tasks/main.yml
+++ b/roles/provision/tasks/main.yml
@@ -4,5 +4,6 @@
   sudo: true
 
 - name: provision cron job 
-  cron: name=provision job={{ provision }} user=root minute=*/5 state=present
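+  # flock -x -n skips a run if the previous provision job still holds the lock.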
+  cron: name=provision job="/usr/bin/flock -x -n /tmp/provision.lck -c {{ provision }}" user=root minute=*/30 state=present
   sudo: true
diff --git a/roles/provision/templates/provision.sh.j2 b/roles/provision/templates/provision.sh.j2
index 69483a265968419c02dc1715e55caa51821a54fe..d4082c8ae41b59824252396bbc178bdeaf7931ef 100644
--- a/roles/provision/templates/provision.sh.j2
+++ b/roles/provision/templates/provision.sh.j2
@@ -3,6 +3,9 @@
 HOME_DIR={{ home_dir }}
 user_list=($(getent passwd | cut -d ":" -f1))
 log_file="/root/slurm.log"
+export PATH=$PATH:{{ slurm_dir }}/bin
+sacctmgr=$( which sacctmgr )
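+# {{ slurm_dir }}/bin is not usually on root's cron PATH, hence the explicit lookup.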
 
 for user in ${user_list[*]}; do
     {% if project_check is defined %}
@@ -24,15 +27,15 @@
 
             find=$(sacctmgr list cluster ${cluster} | grep ${cluster})
             if [ -z "${find}" ]; then
-                su slurm -c "sacctmgr -i add cluster ${cluster}" || { echo "error to create cluster ${cluster}" >> ${log_file} && exit 1; }
+                su slurm -c "$sacctmgr -i add cluster ${cluster}" || { echo "error to create cluster ${cluster}" >> ${log_file} && exit 1; }
             fi
             find=$(sacctmgr list account ${account} | grep ${account})
             if [ -z "${find}" ]; then
-                su slurm -c "sacctmgr -i add account ${account} Description=CVL Organization=monash cluster=${cluster}" || { echo "error to create account ${account}" >> ${log_file} && exit 1; }
+                su slurm -c "$sacctmgr -i add account ${account} Description=CVL Organization=monash cluster=${cluster}" || { echo "error to create account ${account}" >> ${log_file} && exit 1; }
             fi
             find=$(sacctmgr list user ${user} | grep ${user})
             if [ -z "${find}" ]; then
-                su slurm -c "sacctmgr -i add user ${user} account=${account} cluster=${cluster}" || { echo "error to create user ${user}" >> ${log_file} && exit 1; }
+                su slurm -c "$sacctmgr -i add user ${user} account=${account} cluster=${cluster}" || { echo "error to create user ${user}" >> ${log_file} && exit 1; }
             fi
         fi
     fi
diff --git a/roles/provision/vars/main.yml b/roles/provision/vars/main.yml
index 2e084a92f49c7f6380db5cbc17aa8c2faff2f3cf..ed97d539c095cf1413af30cc23dea272095b97dd 100644
--- a/roles/provision/vars/main.yml
+++ b/roles/provision/vars/main.yml
@@ -1,2 +1 @@
 ---
-  slurm_provision: /root/slurm_provision.sh
diff --git a/roles/slurm-from-source/tasks/installCgroup.yml b/roles/slurm-from-source/tasks/installCgroup.yml
index 247983df5f642e58d329495f24e58fef58c0f850..9b21e1b4d7fba85c0b47e8ce12663faedd8b19f4 100644
--- a/roles/slurm-from-source/tasks/installCgroup.yml
+++ b/roles/slurm-from-source/tasks/installCgroup.yml
@@ -11,7 +11,8 @@
     - cgmanager
     - cgmanager-utils
     - libcgmanager0 
+  sudo: true
   when: ansible_os_family == "Debian"    
 
 - name: config cgroup.conf file
   template: dest={{ slurm_dir }}/etc/cgroup.conf src=cgroup.conf.j2 mode=644
diff --git a/roles/slurm-from-source/tasks/installNhc.yml b/roles/slurm-from-source/tasks/installNhc.yml
index 628b10a9706a75c8947fa8bf434cec0c42735c26..9d48a4e7adf23c6e61f6e06a356b2bb43fa69acf 100644
--- a/roles/slurm-from-source/tasks/installNhc.yml
+++ b/roles/slurm-from-source/tasks/installNhc.yml
@@ -21,6 +21,11 @@
   args:
     chdir: /tmp/warewulf-nhc-{{ nhc_version }} 
 
+- name: ensure sysconfig dir exists
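+  # Debian has no /etc/sysconfig by default; create it before copying the NHC sysconfig file.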
+  file: dest=/etc/sysconfig state=directory owner=root group=root mode=755
+  sudo: true
+
 - name: copy nhc sysconfig script
   template: dest=/etc/sysconfig/nhc src=nhc.sysconfig.j2 mode=644
   sudo: true
@@ -39,7 +44,8 @@
   register: generate_nhc_config_file 
 
 - name: generate config file
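+  # -c and CONFDIR write the generated config under {{ nhc_dir }}/etc/nhc instead of the default /etc/nhc.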
-  shell: "{{ nhc_dir }}/sbin/nhc-genconf"
+  shell: "{{ nhc_dir }}/sbin/nhc-genconf -d -c {{ nhc_dir }}/etc/nhc/{{ nhc_config_file }} CONFDIR={{ nhc_dir }}/etc/nhc"
   sudo: true
   when: generate_nhc_config_file 
 
diff --git a/roles/slurm-from-source/tasks/main.yml b/roles/slurm-from-source/tasks/main.yml
index 1fa874cbab3f8a106551190f33ce070447435880..8a22cc68de4a29ebaaab94642227c483ace4c061 100644
--- a/roles/slurm-from-source/tasks/main.yml
+++ b/roles/slurm-from-source/tasks/main.yml
@@ -87,6 +87,7 @@
     - mysql-client
     - python-mysqldb
     - libmysqlclient-dev
+    - lua5.2
   when: ansible_os_family == "Debian"
 
 - include: installMungeFromSource.yml
@@ -134,7 +135,7 @@
   template: src=slurm.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
   sudo: true
   notify: restart slurm
-  when: slurm_use_vpn==False
+  when: not slurm_use_vpn
 
 - name: install slurm.conf
   template: src=slurm-vpn.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
@@ -151,7 +152,7 @@
   delegate_to: "{{ slurmctrl }}"
   run_once: true
   sudo: true
-  when: slurm_lua 
+  when: slurm_lua is defined
 
 - include: installCgroup.yml
 - include: installNhc.yml
diff --git a/roles/slurm-from-source/templates/slurm.conf.j2 b/roles/slurm-from-source/templates/slurm.conf.j2
index 949730f6e30bc6c0edfc56c40c7544c51347c690..b3770b09ec71e4128d0819113dc14982a256a52d 100644
--- a/roles/slurm-from-source/templates/slurm.conf.j2
+++ b/roles/slurm-from-source/templates/slurm.conf.j2
@@ -46,7 +46,7 @@ ReturnToService=1
 TaskPlugin=task/cgroup
 #TaskPlugin=task/affinity
 #TaskPlugin=task/affinity,task/cgroup
-{% if slurm_lua %}
+{% if slurm_lua is defined %}
 JobSubmitPlugins=lua
 {% endif %}
 OverTimeLimit=1
diff --git a/roles/strudel_config/templates/generic_slurm_config.json.j2 b/roles/strudel_config/templates/generic_slurm_config.json.j2
index 626495b706e69a4f05d13dba270d50a69f3f8dbe..17ba6262ca1f5455c95b38aa6e174afcd627ffad 100644
--- a/roles/strudel_config/templates/generic_slurm_config.json.j2
+++ b/roles/strudel_config/templates/generic_slurm_config.json.j2
@@ -91,7 +91,7 @@
                 "__class__": "cmdRegEx",
                 "__module__": "siteConfig",
                 "async": false,
-                "cmd": "\"squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
+                "cmd": "\"{{ slurm_dir }}/bin/squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
                 "failFatal": true,
                 "formatFatal": false,
                 "host": "login",
@@ -121,7 +121,7 @@
                 "__class__": "cmdRegEx",
                 "__module__": "siteConfig",
                 "async": false,
-                "cmd": "squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
+                "cmd": "{{ slurm_dir }}/bin/squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
                 "failFatal": true,
                 "formatFatal": false,
                 "host": "login",
@@ -208,7 +208,7 @@
                 "__class__": "cmdRegEx",
                 "__module__": "siteConfig",
                 "async": false,
-                "cmd": "\"scontrol show job {jobidNumber}\"",
+                "cmd": "\"{{ slurm_dir }}/bin/scontrol show job {jobidNumber}\"",
                 "failFatal": true,
                 "formatFatal": false,
                 "host": "login",
@@ -268,7 +268,7 @@
                 "__class__": "cmdRegEx",
                 "__module__": "siteConfig",
                 "async": false,
-                "cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\nvncserver ; sleep 36000000 ' |  sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
+                "cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\nvncserver ; sleep 36000000 ' |  {{slurm_dir}}/bin/sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
                 "failFatal": true,
                 "formatFatal": false,
                 "host": "login",
@@ -282,7 +282,7 @@
                 "__class__": "cmdRegEx",
                 "__module__": "siteConfig",
                 "async": false,
-                "cmd": "\"scancel {jobidNumber}\"",
+                "cmd": "\"{{ slurm_dir }}/bin/scancel {jobidNumber}\"",
                 "failFatal": true,
                 "formatFatal": false,
                 "host": "login",