diff --git a/dynamicInventory b/dynamicInventory
index c0bda200c89866c77d76bc6d93ac3d5aaf2de057..cd93e153702649a30dda0a207edce50e21fa4740 100755
--- a/dynamicInventory
+++ b/dynamicInventory
@@ -175,7 +175,8 @@ class OpenStackConnection:
 		for server in nc.servers.list():
                         if server.id in instance_ids:
                             if server.metadata and 'ansible_host_group' in server.metadata:
-                                    hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
+                                    #hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
+                                    hostname = server.name
                                     # Set Ansible Host Group
                                     if server.metadata['ansible_host_group'] in inventory:
                                             inventory[server.metadata['ansible_host_group']].append(hostname)
@@ -183,6 +184,8 @@ class OpenStackConnection:
                                             inventory[server.metadata['ansible_host_group']] = [hostname]
                                     # Set the other host variables
                                     inventory['_meta']['hostvars'][hostname] = {}
+                                    inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = server.networks.values()[0][0]
+                                    inventory['_meta']['hostvars'][hostname]['ansible_remote_tmp'] = '/tmp/ansible'
                                     for key in server.metadata.keys():
                                         if 'ansible_ssh' in key:
                                             inventory['_meta']['hostvars'][hostname][key] = server.metadata[key]
diff --git a/roles/dump_ldap_config/tasks/main.yml b/roles/dump_ldap_config/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7e779f14f3d129fdba8342a9a85981018d7407d4
--- /dev/null
+++ b/roles/dump_ldap_config/tasks/main.yml
@@ -0,0 +1,11 @@
+---
+- name: grab cacert
+  shell: cat /etc/openldap/certs/cacert.pem
+  register: ldapCaCertContents
+
+- name: dump vars
+  template: src=ldapConfig.j2 dest=/tmp/ldapConfig.out
+
+- name: fetch vars
+  fetch: src=/tmp/ldapConfig.out dest=/tmp/ldapConfig.out flat=yes
+
diff --git a/roles/dump_ldap_config/templates/ldapConfig.j2 b/roles/dump_ldap_config/templates/ldapConfig.j2
new file mode 100644
index 0000000000000000000000000000000000000000..158e144f17a0dacb02d425206d5d87dccd4d72c5
--- /dev/null
+++ b/roles/dump_ldap_config/templates/ldapConfig.j2
@@ -0,0 +1,15 @@
+---
+ldapServerHostIpLine: "{{ ansible_eth0.ipv4.address }} {{ ansible_fqdn }}"
+ldapCaCertContents: |
+{% for l in ldapCaCertContents.stdout_lines %}
+  {{ l }}
+{% endfor %}
+ldapCaCertFile: /etc/ssl/certs/cacert.crt
+ldapDomain: "{{ ldapDomain }}"
+ldapURI: "ldaps://{{ ansible_fqdn }}:636"
+ldapBindDN: "{{ ldapBindDN }}"
+ldapBindDNPassword: "{{ ldapBindDNPassword }}"
+ldapBase: "{{ ldapBase }}"
+ldapGroupBase: "{{ ldapGroupBase }}"
+ldapRfc2307Pam: ""
+ldap_access_filter: "(objectClass=posixAccount)"
diff --git a/roles/easy-rsa-certificate/tasks/buildCert.yml b/roles/easy-rsa-certificate/tasks/buildCert.yml
index c9b2f9cdd52434a9159d07e3c7175d20221f2c7d..32f5a06f62d3461e0e9f63a2c6c1fa0a55c09c76 100644
--- a/roles/easy-rsa-certificate/tasks/buildCert.yml
+++ b/roles/easy-rsa-certificate/tasks/buildCert.yml
@@ -2,6 +2,7 @@
 - name: "Check client ca certificate"
   register: ca_cert
   stat: "path={{ x509_cacert_file }}"
+  sudo: true
 
 - name: "Check certificate and key"
   shell: (openssl x509 -noout -modulus -in {{ x509_cert_file }}  | openssl md5 ; openssl rsa -noout -modulus -in {{ x509_key_file }} | openssl md5) | uniq | wc -l
@@ -46,7 +47,7 @@
   when: needcert
 
 - name: "Creating CSR"
-  shell: " cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
+  shell: "cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
   when: needcert
   sudo: true
 
diff --git a/roles/easy-rsa-common/tasks/installEasyRsa.yml b/roles/easy-rsa-common/tasks/installEasyRsa.yml
index af050cf98aac4afc8b9279cda6915f73dd0b14f9..e66e88a25ef98ac472e9054b2f0c45db7ce44f92 100644
--- a/roles/easy-rsa-common/tasks/installEasyRsa.yml
+++ b/roles/easy-rsa-common/tasks/installEasyRsa.yml
@@ -6,7 +6,7 @@
   when: ansible_os_family == 'RedHat'
 - 
   name: "Installing easy-rsa"
-  apt: "name=openvpn state=present"
+  apt: "name=openvpn state=present update_cache=yes"
   sudo: True
   when: ansible_os_family == 'Debian'
 - 
diff --git a/roles/easy-rsa-common/tasks/main.yml b/roles/easy-rsa-common/tasks/main.yml
index 39760c7a8eb9bd142666798adf73dce0a4706b37..619f8806e8dadd278023cf1dc38f7160f5a10e7e 100644
--- a/roles/easy-rsa-common/tasks/main.yml
+++ b/roles/easy-rsa-common/tasks/main.yml
@@ -3,6 +3,3 @@
   include: installEasyRsa.yml
 -
   include: copyConfigurationFile.yml
--
-  include: yumList.yml
-
diff --git a/roles/karaage2.7/meta/main.yml b/roles/karaage2.7/meta/main.yml
index f6f4f5b9a630b628a2bd4c77cd16f924f8c90434..11e79807cceb222f4d145464d2f0db20a1aefec7 100644
--- a/roles/karaage2.7/meta/main.yml
+++ b/roles/karaage2.7/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-    - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+    - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/karaage2.7/tasks/main.yml b/roles/karaage2.7/tasks/main.yml
index fe3197637d6756bbead854ed6806e26fa78ee0a7..279b5d5250b99d667a2b51ec9b1e4f881760343a 100644
--- a/roles/karaage2.7/tasks/main.yml
+++ b/roles/karaage2.7/tasks/main.yml
@@ -1,5 +1,5 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/ldapserver/meta/main.yml b/roles/ldapserver/meta/main.yml
index 46f5a2316b48320534f9e99db594e1bb61d34744..11e79807cceb222f4d145464d2f0db20a1aefec7 100644
--- a/roles/ldapserver/meta/main.yml
+++ b/roles/ldapserver/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-  - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+    - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/ldapserver/tasks/main.yml b/roles/ldapserver/tasks/main.yml
index a5ec4d6992069cba7b5dcc69e2d51c57829d9e29..41631b452680e59475e3bd65229f7d23123094cf 100644
--- a/roles/ldapserver/tasks/main.yml
+++ b/roles/ldapserver/tasks/main.yml
@@ -1,6 +1,6 @@
 ---
 
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/nfs-client/tasks/mountFileSystem.yml b/roles/nfs-client/tasks/mountFileSystem.yml
index 8d62f720e24e09d0e58acbbbe35b1072b810f1be..4a08034045c117019e6eb266e8d0cb67b3f3ec22 100644
--- a/roles/nfs-client/tasks/mountFileSystem.yml
+++ b/roles/nfs-client/tasks/mountFileSystem.yml
@@ -4,10 +4,41 @@
 #  with_items: exportList
 #  register: result 
   
+- name: "stop fail2ban"
+  service: name=fail2ban state=stopped
+  sudo: true
+
+- name: restart idmap 
+  service: name={{ item }} state=restarted
+  with_items:
+    - rpcbind
+    - rpcidmapd
+  sudo: true
+
 - name: "Mounting NFS mounts"
-  mount: name={{ item.name }} src={{ hostvars[nfs_server]['ansible_'+item.interface]['ipv4']['address'] }}:{{ item.src }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
   with_items: exportList 
   notify: "restart authentication"
   notify: "restart idmap"
   sudo: true 
+  ignore_errors: true
+  register: firstMount
   when: exportList is defined 
+
+- name: "Wait for nfs to stabilise"
+  command: sleep 60
+  delegate_to: 127.0.0.1
+  when: firstMount | failed
+
+- name: "Mounting NFS mounts"
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  with_items: exportList 
+  notify: "restart authentication"
+  notify: "restart idmap"
+  sudo: true 
+  when: exportList is defined and firstMount | failed
+
+- name: "restart fail2ban"
+  service: name=fail2ban state=started
+  sudo: true
+
diff --git a/roles/nfs-server/tasks/main.yml b/roles/nfs-server/tasks/main.yml
index 3e60a572484f4ba692e7884469d80acc1315f1de..29b98a51f78f9679387544cdcec27a1711a2383d 100644
--- a/roles/nfs-server/tasks/main.yml
+++ b/roles/nfs-server/tasks/main.yml
@@ -1,4 +1,3 @@
 ---
 - include: mkFilesystem.yml 
-- include: fileSymbolicLink.yml
 - include: startServer.yml
diff --git a/roles/openLdapClient/tasks/configLdapClient.yml b/roles/openLdapClient/tasks/configLdapClient.yml
index 21aa7d2501596bc4a864cc3abb77d39da32f0045..b55b502410dfecc2def3bdc83898265f623c37c8 100644
--- a/roles/openLdapClient/tasks/configLdapClient.yml
+++ b/roles/openLdapClient/tasks/configLdapClient.yml
@@ -6,22 +6,41 @@
     - nsswitch.conf
   sudo: true
 
+- name: "get cert dir"
+  shell: "dirname {{ ldapCaCertFile }}"
+  delegate_to: localhost
+  run_once: true
+  register: ldapCaCertDir
+
+- name: "make basedir"
+  file: path={{ ldapCaCertDir.stdout }} state=directory owner=root
+  sudo: true
+
 - name: "Copy the CA cert"
   copy: src={{ ldapCaCertSrc }} dest={{ ldapCaCertFile }} owner=root mode=644
   sudo: true
   when: ldapCaCertSrc is defined
 
+- name: "Template CA cert"
+  template: src=ldapCaCert.j2 dest={{ ldapCaCertFile }} owner=root mode=644
+  sudo: true
+  when: ldapCaCertContents is defined
+
+- name: "Copy pam config to ldap client"
+  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth
+  sudo: true
+
 - name: "Copy pam config to ldap client"
-  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth-ac
+  template: src=password-auth.j2 dest=/etc/pam.d/password-auth
   sudo: true
 
 - name: "Copy system auth to ldap client"
   template: src=authconfig.j2 dest=/etc/sysconfig/authconfig
   sudo: true
 
-- name: "Copy ldap.conf file "
-  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
-  sudo: true
+#- name: "Copy ldap.conf file "
+#  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
+#  sudo: true
 
 - name: "Add LDAP server IP address to /etc/hosts"
   lineinfile: dest=/etc/hosts line="{{ ldapServerHostIpLine }}" state=present insertafter=EOF
@@ -33,4 +52,8 @@
   sudo: true
   notify: restart sssd
 
+- name: "start sssd"
+  service: name=sssd state=started
+  sudo: true
+
 
diff --git a/roles/openLdapClient/tasks/installOpenLdap.yml b/roles/openLdapClient/tasks/installOpenLdap.yml
index 29f085f6542e957bad01800f19da712ee3ac34fb..659a86de958d2b883c2feefa98d399be0764d4dd 100644
--- a/roles/openLdapClient/tasks/installOpenLdap.yml
+++ b/roles/openLdapClient/tasks/installOpenLdap.yml
@@ -2,15 +2,15 @@
 - name: "Install open ldap package yum"
   action: yum pkg={{ item }} state=installed 
   with_items:
-    - openldap
-    - openldap-clients
+      #    - openldap
+      #    - openldap-clients
     - sssd
     - sssd-common
     - sssd-client
     - nss
     - nss-tools
-    - nss-pam-ldapd
-    - pam_ldap
+      #    - nss-pam-ldapd
+      #    - pam_ldap
   sudo: true
   when: ansible_os_family == 'RedHat'
 
diff --git a/roles/openLdapClient/templates/authconfig.j2 b/roles/openLdapClient/templates/authconfig.j2
index 20c2b7f549afdb64c527f7175b02d0a65d3616a9..de5a087e95f105998e109f452d0a29a9bfe95972 100644
--- a/roles/openLdapClient/templates/authconfig.j2
+++ b/roles/openLdapClient/templates/authconfig.j2
@@ -2,7 +2,7 @@ IPADOMAINJOINED=no
 USEMKHOMEDIR=no
 USEPAMACCESS=no
 CACHECREDENTIALS=yes
-USESSSDAUTH=no
+USESSSDAUTH=yes
 USESHADOW=yes
 USEWINBIND=no
 USEDB=no
@@ -10,7 +10,7 @@ FORCELEGACY=no
 USEFPRINTD=yes
 FORCESMARTCARD=no
 PASSWDALGORITHM=sha512
-USELDAPAUTH=yes
+USELDAPAUTH=no
 USEPASSWDQC=no
 IPAV2NONTP=no
 USELOCAUTHORIZE=yes
@@ -18,9 +18,9 @@ USECRACKLIB=yes
 USEIPAV2=no
 USEWINBINDAUTH=no
 USESMARTCARD=no
-USELDAP=yes
+USELDAP=no
 USENIS=no
 USEKERBEROS=no
 USESYSNETAUTH=no
-USESSSD=no
+USESSSD=yes
 USEHESIOD=no
diff --git a/roles/openLdapClient/templates/ldapCaCert.j2 b/roles/openLdapClient/templates/ldapCaCert.j2
new file mode 100644
index 0000000000000000000000000000000000000000..35383b2f3ea87b0c04750ba1f07344179ad7e630
--- /dev/null
+++ b/roles/openLdapClient/templates/ldapCaCert.j2
@@ -0,0 +1 @@
+{{ ldapCaCertContents }}
diff --git a/roles/openLdapClient/templates/password-auth.j2 b/roles/openLdapClient/templates/password-auth.j2
new file mode 100644
index 0000000000000000000000000000000000000000..b849fdeacc9014fe38215bdf64732ede07597c36
--- /dev/null
+++ b/roles/openLdapClient/templates/password-auth.j2
@@ -0,0 +1,25 @@
+# This file is auto-generated.
+# User changes will be destroyed the next time authconfig is run.
+auth        required      pam_env.so
+auth        sufficient    pam_unix.so nullok try_first_pass
+auth        requisite     pam_succeed_if.so uid >= 500 quiet
+auth        sufficient    pam_sss.so use_first_pass
+auth        required      pam_deny.so
+
+account     required      pam_unix.so
+account     sufficient    pam_localuser.so
+account     sufficient    pam_succeed_if.so uid < 500 quiet
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
+account     required      pam_permit.so
+
+password    requisite     pam_cracklib.so try_first_pass retry=3
+password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
+password    sufficient    pam_sss.so use_authtok
+password    required      pam_deny.so
+
+session     optional      pam_keyinit.so revoke
+session     required      pam_limits.so
+session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
+session     required      pam_unix.so
+session     optional      pam_sss.so
+
diff --git a/roles/openLdapClient/templates/sssd.j2 b/roles/openLdapClient/templates/sssd.j2
index 9b7f8dbc8ed538cd0cd37ecde5c78a67bbf28f4e..05c9acf40eb1062dc8255042ff398ca9ea63ae98 100644
--- a/roles/openLdapClient/templates/sssd.j2
+++ b/roles/openLdapClient/templates/sssd.j2
@@ -27,6 +27,7 @@ ldap_tls_cacert = {{ ldapCaCertFile }}
 ldap_default_bind_dn = {{ ldapBindDN }} 
 ldap_default_authtok_type = password
 ldap_default_authtok = {{ ldapBindDNPassword }} 
+ldap_access_filter = {{ ldap_access_filter }}
 
 {{ ldapRfc2307 }}
 
diff --git a/roles/openLdapClient/templates/system-auth-ac.j2 b/roles/openLdapClient/templates/system-auth-ac.j2
index 4c96e491eb2eff88c8b5e416941bf65d021c7a95..2f9036e24bd3d33b4626af796c1427a168ad3de3 100644
--- a/roles/openLdapClient/templates/system-auth-ac.j2
+++ b/roles/openLdapClient/templates/system-auth-ac.j2
@@ -4,21 +4,21 @@
 auth        required      pam_env.so
 auth        sufficient    pam_unix.so nullok try_first_pass
 auth        requisite     pam_succeed_if.so uid >= 500 quiet
-auth        sufficient    pam_ldap.so use_first_pass
+auth        sufficient    pam_sss.so use_first_pass
 auth        required      pam_deny.so
 
 account     required      pam_unix.so broken_shadow
 account     sufficient    pam_succeed_if.so uid < 500 quiet
-account     [default=bad success=ok user_unknown=ignore] pam_ldap.so
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
 account     required      pam_permit.so
 
 password    requisite     pam_cracklib.so try_first_pass retry=3
 password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
-password    sufficient    pam_ldap.so use_authtok
+password    sufficient    pam_sss.so use_authtok
 password    required      pam_deny.so
 
 session     optional      pam_keyinit.so revoke
 session     required      pam_limits.so
 session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
 session     required      pam_unix.so
-session     optional      pam_ldap.so
+session     optional      pam_sss.so
diff --git a/roles/slurm/tasks/main.yml b/roles/slurm/tasks/main.yml
index d351e4603f55fc1c368adc881526ff4971fae576..b14c7401b7b8460b4ec905ba795a02232d4a37de 100644
--- a/roles/slurm/tasks/main.yml
+++ b/roles/slurm/tasks/main.yml
@@ -16,11 +16,11 @@
   sudo: true
 
 - name: create slurm group
-  group: name=slurm
+  group: name=slurm system=yes
   sudo: true
 
 - name: create slurm user
-  user: name=slurm group=slurm
+  user: name=slurm group=slurm system=yes
   sudo: true
 
 - name: install slurm rpms
diff --git a/roles/ssh-password-login/handlers/main.yml b/roles/ssh-password-login/handlers/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7141e8ee5afee44b78216875e7dc097d679d9661
--- /dev/null
+++ b/roles/ssh-password-login/handlers/main.yml
@@ -0,0 +1,3 @@
+- name: "restart sshd"
+  service: name=sshd state=restarted
+  sudo: true
diff --git a/roles/ssh-password-login/tasks/main.yml b/roles/ssh-password-login/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9ea2baa20af22f2345044cd4c3a20b5b495acc13
--- /dev/null
+++ b/roles/ssh-password-login/tasks/main.yml
@@ -0,0 +1,20 @@
+- name: "Enable Challenge Response"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "ChallengeResponseAuthentication no"
+    line: "ChallengeResponseAuthentication yes" 
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
+- name: "Enable Challenge Response"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "PasswordAuthentication no"
+    line: "PasswordAuthentication yes"
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
diff --git a/roles/strudel_config/tasks/main.yml b/roles/strudel_config/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b7bec4214ecaa420e962160d4c050170ba411894
--- /dev/null
+++ b/roles/strudel_config/tasks/main.yml
@@ -0,0 +1,8 @@
+- name: "Set login node"
+  set_fact: 
+    loginNode: "{{ ansible_eth0.ipv4.address }}"
+
+- name: "Template Strudel config"
+  template: src=generic_slurm_config.json.j2 dest=/tmp/Strudel_Desktops.json
+  delegate_to: 127.0.0.1
+  run_once: True
diff --git a/roles/strudel_config/templates/generic_slurm_config.json.j2 b/roles/strudel_config/templates/generic_slurm_config.json.j2
new file mode 100644
index 0000000000000000000000000000000000000000..3acb443e96311ceec9d09bed0739a2be95c32884
--- /dev/null
+++ b/roles/strudel_config/templates/generic_slurm_config.json.j2
@@ -0,0 +1,452 @@
+[
+    [
+        "GenericDesktops"
+    ],
+    {
+        "GenericDesktops": {
+            "__class__": "siteConfig",
+            "__module__": "siteConfig",
+            "agent": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -l {username} {execHost} \"echo agent_hello; bash \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "agent_hello"
+                ],
+                "requireMatch": true
+            },
+            "authURL": null,
+            "authorizedKeysFile": null,
+            "dbusSessionBusAddress": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DISPLAY={vncDisplay};timeout 15 /usr/local/bin/cat_dbus_session_file.sh'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^DBUS_SESSION_BUS_ADDRESS=(?P<dbusSessionBusAddress>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "defaults": {
+                "jobParams_hours": 48,
+                "jobParams_mem": 4,
+                "jobParams_ppn": 1
+            },
+            "directConnect": true,
+            "displayStrings": {
+                "__class__": "sshKeyDistDisplayStrings",
+                "__module__": "siteConfig",
+                "createNewKeyDialogNewPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.",
+                "createNewKeyDialogNewPassphraseMismatch": "Passphrases don't match!",
+                "createNewKeyDialogNewPassphraseTooShort": "Passphrase is too short.",
+                "helpEmailAddress": "help@massive.org.au",
+                "networkError": "It looks like a network error has occurred. You may be able to resume your work by logging in again.",
+                "newPassphrase": "It looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseMismatch": "Sorry, the two passphrases you entered don't match.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseTitle": "Please enter a new passphrase",
+                "newPassphraseTooShort": "Sorry, the passphrase must be at least six characters.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "passphrasePrompt": "Please enter the passphrase for your SSH key",
+                "passphrasePromptIncorrect": "Sorry, that passphrase was incorrect.\nPlease enter the passphrase for your SSH key\nIf you have forgotten the passphrase for your key, you may need to delete it and create a new key.\nYou can find this option under the Identity menu.\n",
+                "passphrasePromptIncorrectl": "Sorry, that passphrase was incorrect. Please enter the passphrase for your ssh key",
+                "passwdPrompt": "Please enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "passwdPromptIncorrect": "Sorry, that password was incorrect.\nPlease enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "persistentMessage": "Would you like to leave your current session running so that you can reconnect later?",
+                "persistentMessagePersist": "Leave it running",
+                "persistentMessageStop": "Stop the desktop",
+                "qdelQueuedJob": "It looks like you've been waiting for a job to start.\nDo you want me to delete the job or leave it in the queue so you can reconnect later?\n",
+                "qdelQueuedJobNOOP": "Leave it in the queue (I'll reconnect later)",
+                "qdelQueuedJobQdel": "Delete the job",
+                "reconnectMessage": "An Existing Desktop was found. Would you like to reconnect or kill it and start a new desktop?",
+                "reconnectMessageNo": "New desktop",
+                "reconnectMessageYes": "Reconnect",
+                "temporaryKey": "\nWould you like to use the launcher's old behaviour (entering a password every time you start a new desktop) or try the new behaviour (creating an ssh key pair and entering a passphrase the first time you use the launcher after reboot.)\n\nPasswords are recommended if this is a shared user account.\n\nSSH Keys are recommended if you are the only person who uses this account.\n\nThis option can be changed from the Identity menu.\n",
+                "temporaryKeyNo": "Use my SSH Key",
+                "temporaryKeyYes": "Use my password every time"
+            },
+            "displayWebDavInfoDialogOnRemoteDesktop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'echo -e \\\"You can access your local home directory in Nautilus File Browser, using the location:\\n\\ndav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\n\\nYour one-time password is {vncPasswd}\\\" > ~/.vnc/\\$(hostname){vncDisplay}-webdav.txt'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "execHost": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<execHost>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "getProjects": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"groups | sed 's@ @\\n@g'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^\\s*(?P<group>\\S+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "imageid": null,
+            "instanceFlavour": null,
+            "listAll": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "(?P<jobid>(?P<jobidNumber>[0-9]+)) (?P<remainingWalltime>.*)$"
+                ],
+                "requireMatch": false
+            },
+            "loginHost": "{{ loginNode }}",
+            "messageRegexs": [
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^INFO:(?P<info>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^WARN:(?P<warn>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^ERROR:(?P<error>.*(?:\n|\r\n?))"
+                }
+            ],
+            "onConnectScript": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'module load keyutility ; mountUtility.py'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "openWebDavShareInRemoteFileBrowser": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};/usr/bin/gconftool-2 --type=Boolean --set /apps/nautilus/preferences/always_use_location_entry true {ampersand}{ampersand} DISPLAY={vncDisplay} xdg-open dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "otp": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "'cat ~/.vnc/clearpass'",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<vncPasswd>\\S+)$"
+                ],
+                "requireMatch": true
+            },
+            "provision": null,
+            "relabel": {},
+            "runSanityCheck": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "running": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scontrol show job {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "JobState=RUNNING"
+                ],
+                "requireMatch": true
+            },
+            "setDisplayResolution": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "showStart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "siteRanges": {
+                "jobParams_hours": [
+                    1,
+                    336
+                ],
+                "jobParams_mem": [
+                    1,
+                    1024
+                ],
+                "jobParams_nodes": [
+                    1,
+                    10
+                ],
+                "jobParams_ppn": [
+                    1,
+                    12
+                ]
+            },
+            "startServer": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; module load turbovnc ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\n/usr/local/bin/vncsession --vnc turbovnc --geometry {resolution} ; sleep 36000000 ' |  sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^Submitted batch job (?P<jobid>(?P<jobidNumber>[0-9]+))$"
+                ],
+                "requireMatch": true
+            },
+            "stop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "stopForRestart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "tunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -L {localPortNumber}:localhost:{remotePortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "tunnel_hello"
+                ],
+                "requireMatch": true
+            },
+            "username": null,
+            "visibility": {
+                "advancedCheckBoxPanel": true,
+                "cipherPanel": "Advanced",
+                "debugCheckBoxPanel": "Advanced",
+                "jobParams_hours": true,
+                "jobParams_nodes": true,
+                "jobParams_ppn": true,
+                "label_hours": true,
+                "label_nodes": true,
+                "label_ppn": true,
+                "resolutionPanel": "Advanced",
+                "resourcePanel": "Advanced",
+                "usernamePanel": true
+            },
+            "vncDisplay": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"cat .vnc/slurm-{jobidNumber}.out\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^.*?started on display \\S+(?P<vncDisplay>:[0-9]+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "webDavCloseWindow": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay}; wmctrl -F -i -c {webDavWindowID}'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "webDavIntermediatePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<intermediateWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavMount": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};echo \\\\\\\"import pexpect;child = pexpect.spawn('gvfs-mount dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}');child.expect('Password: ');child.sendline('{vncPasswd}');child.expect(pexpect.EOF);child.close();print 'gvfs-mount returned ' + str(child.exitstatus)\\\\\\\" {pipe} python\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^gvfs-mount returned (?P<webDavMountingExitCode>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavRemotePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<remoteWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavTunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -oExitOnForwardFailure=yes -R {remoteWebDavPortNumber}:localhost:{localWebDavPortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "tunnel_hello"
+                ],
+                "requireMatch": true
+            },
+            "webDavUnmount": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay};timeout 1 gvfs-mount --unmount-scheme dav'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "webDavWindowID": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress}; DISPLAY={vncDisplay} xwininfo -root -tree'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^\\s+(?P<webDavWindowID>\\S+)\\s+\"{homeDirectoryWebDavShareName}.*Browser.*$"
+                ],
+                "requireMatch": true
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/roles/syncExports/tasks/addExports.yml b/roles/syncExports/tasks/addExports.yml
index d3723e786ef615eb1224bfb4ce0b435ed74fdc1f..0ea7e7c9a5b0a399a0a4d64c0d8188e9e71f3844 100644
--- a/roles/syncExports/tasks/addExports.yml
+++ b/roles/syncExports/tasks/addExports.yml
@@ -4,4 +4,17 @@
   delegate_to: "{{ nfs_server }}"
   run_once: true
   sudo: true
-  notify: "Reload exports"
+
+# Do not do this as a handler; instead do it here as a task so that it happens immediately after the exports file is created,
+# before any clients attempt a mount.
+- name : "Reload exports"
+  command: exportfs -ra
+  delegate_to: "{{ nfs_server }}"
+  run_once: true
+  sudo: true
+
+- name : "Pause ... clients sometimes have errors"
+  command: sleep 60
+  delegate_to: "{{ nfs_server }}"
+  run_once: true
+  sudo: true
diff --git a/roles/vncserver/tasks/main.yml b/roles/vncserver/tasks/main.yml
index b4df6889cdd83c26b50f59f044a849bc2e82c8da..1509b03ac37cf5dfc26d6148af16cf2094023bbd 100644
--- a/roles/vncserver/tasks/main.yml
+++ b/roles/vncserver/tasks/main.yml
@@ -1,5 +1,6 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml"
+#- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_architecture }}.yml"
 
 - name: add repos apt
   shell: "add-apt-repository -y 'deb {{ item }} {{ ansible_distribution_release }} main' "