From d7363d4fdbbfabe2ea8d5670b19d0ccf1f8868a2 Mon Sep 17 00:00:00 2001
From: Chris Hines <chris.hines@monash.edu>
Date: Tue, 3 Feb 2015 01:53:07 +0000
Subject: [PATCH] Assorted updates to make Ansible work correctly for
 my test cluster

---
 dynamicInventory                              |   5 +-
 roles/dump_ldap_config/tasks/main.yml         |  11 +
 .../dump_ldap_config/templates/ldapConfig.j2  |  15 +
 .../easy-rsa-certificate/tasks/buildCert.yml  |   3 +-
 .../easy-rsa-common/tasks/installEasyRsa.yml  |   2 +-
 roles/easy-rsa-common/tasks/main.yml          |   3 -
 roles/karaage2.7/meta/main.yml                |   2 +-
 roles/karaage2.7/tasks/main.yml               |   2 +-
 roles/ldapserver/meta/main.yml                |   2 +-
 roles/ldapserver/tasks/main.yml               |   2 +-
 roles/nfs-client/tasks/mountFileSystem.yml    |  33 +-
 roles/nfs-server/tasks/main.yml               |   1 -
 .../openLdapClient/tasks/configLdapClient.yml |  31 +-
 .../openLdapClient/tasks/installOpenLdap.yml  |   8 +-
 roles/openLdapClient/templates/authconfig.j2  |   8 +-
 roles/openLdapClient/templates/ldapCaCert.j2  |   1 +
 .../openLdapClient/templates/password-auth.j2 |  25 +
 roles/openLdapClient/templates/sssd.j2        |   1 +
 .../templates/system-auth-ac.j2               |   8 +-
 roles/slurm/tasks/main.yml                    |   4 +-
 roles/ssh-password-login/handlers/main.yml    |   3 +
 roles/ssh-password-login/tasks/main.yml       |  20 +
 roles/strudel_config/tasks/main.yml           |   8 +
 .../templates/generic_slurm_config.json.j2    | 452 ++++++++++++++++++
 roles/syncExports/tasks/addExports.yml        |  15 +-
 roles/vncserver/tasks/main.yml                |   3 +-
 26 files changed, 636 insertions(+), 32 deletions(-)
 create mode 100644 roles/dump_ldap_config/tasks/main.yml
 create mode 100644 roles/dump_ldap_config/templates/ldapConfig.j2
 create mode 100644 roles/openLdapClient/templates/ldapCaCert.j2
 create mode 100644 roles/openLdapClient/templates/password-auth.j2
 create mode 100644 roles/ssh-password-login/handlers/main.yml
 create mode 100644 roles/ssh-password-login/tasks/main.yml
 create mode 100644 roles/strudel_config/tasks/main.yml
 create mode 100644 roles/strudel_config/templates/generic_slurm_config.json.j2

diff --git a/dynamicInventory b/dynamicInventory
index c0bda20..cd93e15 100755
--- a/dynamicInventory
+++ b/dynamicInventory
@@ -175,7 +175,8 @@ class OpenStackConnection:
 		for server in nc.servers.list():
                         if server.id in instance_ids:
                             if server.metadata and 'ansible_host_group' in server.metadata:
-                                    hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
+                                    #hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
+                                    hostname = server.name
                                     # Set Ansible Host Group
                                     if server.metadata['ansible_host_group'] in inventory:
                                             inventory[server.metadata['ansible_host_group']].append(hostname)
@@ -183,6 +184,8 @@ class OpenStackConnection:
                                             inventory[server.metadata['ansible_host_group']] = [hostname]
                                     # Set the other host variables
                                     inventory['_meta']['hostvars'][hostname] = {}
+                                    inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = server.networks.values()[0][0]
+                                    inventory['_meta']['hostvars'][hostname]['ansible_remote_tmp'] = '/tmp/ansible'
                                     for key in server.metadata.keys():
                                         if 'ansible_ssh' in key:
                                             inventory['_meta']['hostvars'][hostname][key] = server.metadata[key]
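
Note on the hunk above: keying the inventory on server.name drops the reverse-DNS lookup (presumably unreliable in the test cluster), while ansible_ssh_host still carries the instance's first address so SSH reaches the right machine, and ansible_remote_tmp moves Ansible's working files to /tmp/ansible. A minimal sketch of the hostvars this produces, with purely illustrative names and addresses:

    # Illustrative shape of ./dynamicInventory --list output (hypothetical values)
    _meta:
      hostvars:
        node01:
          ansible_ssh_host: 192.168.1.10    # first address on the first network
          ansible_remote_tmp: /tmp/ansible  # per-host remote tmp override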
diff --git a/roles/dump_ldap_config/tasks/main.yml b/roles/dump_ldap_config/tasks/main.yml
new file mode 100644
index 0000000..7e779f1
--- /dev/null
+++ b/roles/dump_ldap_config/tasks/main.yml
@@ -0,0 +1,11 @@
+---
+- name: grab cacert
+  shell: cat /etc/openldap/certs/cacert.pem
+  register: ldapCaCertContents
+
+- name: dump vars
+  template: src=ldapConfig.j2 dest=/tmp/ldapConfig.out
+
+- name: fetch vars
+  fetch: src=/tmp/ldapConfig.out dest=/tmp/ldapConfig.out flat=yes
+
diff --git a/roles/dump_ldap_config/templates/ldapConfig.j2 b/roles/dump_ldap_config/templates/ldapConfig.j2
new file mode 100644
index 0000000..158e144
--- /dev/null
+++ b/roles/dump_ldap_config/templates/ldapConfig.j2
@@ -0,0 +1,15 @@
+---
+ldapServerHostIpLine: "{{ ansible_eth0.ipv4.address }} {{ ansible_fqdn }}"
+ldapCaCertContents: |
+{% for l in ldapCaCertContents.stdout_lines %}
+  {{ l }}
+{% endfor %}
+ldapCaCertFile: /etc/ssl/certs/cacert.crt
+ldapDomain: "{{ ldapDomain }}"
+ldapURI: "ldaps://{{ ansible_fqdn }}:636"
+ldapBindDN: "{{ ldapBindDN }}"
+ldapBindDNPassword: "{{ ldapBindDNPassword }}"
+ldapBase: "{{ ldapBase }}"
+ldapGroupBase: "{{ ldapGroupBase }}"
+ldapRfc2307Pam: ""
+ldap_access_filter: "(objectClass=posixAccount)"
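
The ldapConfig.j2 template above serializes the server's LDAP settings as a YAML document, so the copy fetched to /tmp/ldapConfig.out on the control machine can be loaded straight into another play. One plausible consumer (not part of this patch; include_vars is assumed to accept an absolute path):

    - name: "load the LDAP config dumped by dump_ldap_config"
      include_vars: /tmp/ldapConfig.out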
diff --git a/roles/easy-rsa-certificate/tasks/buildCert.yml b/roles/easy-rsa-certificate/tasks/buildCert.yml
index c9b2f9c..32f5a06 100644
--- a/roles/easy-rsa-certificate/tasks/buildCert.yml
+++ b/roles/easy-rsa-certificate/tasks/buildCert.yml
@@ -2,6 +2,7 @@
 - name: "Check client ca certificate"
   register: ca_cert
   stat: "path={{ x509_cacert_file }}"
+  sudo: true
 
 - name: "Check certificate and key"
   shell: (openssl x509 -noout -modulus -in {{ x509_cert_file }}  | openssl md5 ; openssl rsa -noout -modulus -in {{ x509_key_file }} | openssl md5) | uniq | wc -l
@@ -46,7 +47,7 @@
   when: needcert
 
 - name: "Creating CSR"
-  shell: " cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
+  shell: "cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
   when: needcert
   sudo: true
 
diff --git a/roles/easy-rsa-common/tasks/installEasyRsa.yml b/roles/easy-rsa-common/tasks/installEasyRsa.yml
index af050cf..e66e88a 100644
--- a/roles/easy-rsa-common/tasks/installEasyRsa.yml
+++ b/roles/easy-rsa-common/tasks/installEasyRsa.yml
@@ -6,7 +6,7 @@
   when: ansible_os_family == 'RedHat'
 - 
   name: "Installing easy-rsa"
-  apt: "name=openvpn state=present"
+  apt: "name=openvpn state=present update_cache=yes"
   sudo: True
   when: ansible_os_family == 'Debian'
 - 
diff --git a/roles/easy-rsa-common/tasks/main.yml b/roles/easy-rsa-common/tasks/main.yml
index 39760c7..619f880 100644
--- a/roles/easy-rsa-common/tasks/main.yml
+++ b/roles/easy-rsa-common/tasks/main.yml
@@ -3,6 +3,3 @@
   include: installEasyRsa.yml
 -
   include: copyConfigurationFile.yml
--
-  include: yumList.yml
-
diff --git a/roles/karaage2.7/meta/main.yml b/roles/karaage2.7/meta/main.yml
index f6f4f5b..11e7980 100644
--- a/roles/karaage2.7/meta/main.yml
+++ b/roles/karaage2.7/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-    - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+    - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/karaage2.7/tasks/main.yml b/roles/karaage2.7/tasks/main.yml
index fe31976..279b5d5 100644
--- a/roles/karaage2.7/tasks/main.yml
+++ b/roles/karaage2.7/tasks/main.yml
@@ -1,5 +1,5 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/ldapserver/meta/main.yml b/roles/ldapserver/meta/main.yml
index 46f5a23..11e7980 100644
--- a/roles/ldapserver/meta/main.yml
+++ b/roles/ldapserver/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-  - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+    - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/ldapserver/tasks/main.yml b/roles/ldapserver/tasks/main.yml
index a5ec4d6..41631b4 100644
--- a/roles/ldapserver/tasks/main.yml
+++ b/roles/ldapserver/tasks/main.yml
@@ -1,6 +1,6 @@
 ---
 
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/nfs-client/tasks/mountFileSystem.yml b/roles/nfs-client/tasks/mountFileSystem.yml
index 8d62f72..4a08034 100644
--- a/roles/nfs-client/tasks/mountFileSystem.yml
+++ b/roles/nfs-client/tasks/mountFileSystem.yml
@@ -4,10 +4,41 @@
 #  with_items: exportList
 #  register: result 
   
+- name: "stop fail2ban"
+  service: name=fail2ban state=stopped
+  sudo: true
+
+- name: restart rpcbind and rpcidmapd
+  service: name={{ item }} state=restarted
+  with_items:
+    - rpcbind
+    - rpcidmapd
+  sudo: true
+
 - name: "Mounting NFS mounts"
-  mount: name={{ item.name }} src={{ hostvars[nfs_server]['ansible_'+item.interface]['ipv4']['address'] }}:{{ item.src }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
   with_items: exportList 
   notify: "restart authentication"
   notify: "restart idmap"
   sudo: true 
+  ignore_errors: true
+  register: firstMount
   when: exportList is defined 
+
+- name: "Wait for nfs to stabailse"
+  command: sleep 60
+  delegate_to: 127.0.0.1
+  when: firstMount | failed
+
+- name: "Mounting NFS mounts"
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  with_items: exportList 
+  notify: "restart authentication"
+  notify: "restart idmap"
+  sudo: true 
+  when: exportList is defined and firstMount | failed
+
+- name: "restart fail2ban"
+  service: name=fail2ban state=started
+  sudo: true
+
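The three tasks above hand-roll a retry: the first mount registers its result and ignores failure, the 60-second pause runs only if it failed, and the mount is then attempted once more. A sketch of the same idea as a single retries/until loop, assuming an Ansible version where until works with the mount module:

    - name: "Mounting NFS mounts"
      mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
      with_items: exportList
      register: mountResult
      until: mountResult | success   # re-run the mount until it succeeds
      retries: 2
      delay: 60
      sudo: true
      when: exportList is defined
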
diff --git a/roles/nfs-server/tasks/main.yml b/roles/nfs-server/tasks/main.yml
index 3e60a57..29b98a5 100644
--- a/roles/nfs-server/tasks/main.yml
+++ b/roles/nfs-server/tasks/main.yml
@@ -1,4 +1,3 @@
 ---
 - include: mkFilesystem.yml 
-- include: fileSymbolicLink.yml
 - include: startServer.yml
diff --git a/roles/openLdapClient/tasks/configLdapClient.yml b/roles/openLdapClient/tasks/configLdapClient.yml
index 21aa7d2..b55b502 100644
--- a/roles/openLdapClient/tasks/configLdapClient.yml
+++ b/roles/openLdapClient/tasks/configLdapClient.yml
@@ -6,22 +6,41 @@
     - nsswitch.conf
   sudo: true
 
+- name: "get cert dir"
+  shell: "dirname {{ ldapCaCertFile }}"
+  delegate_to: localhost
+  run_once: true
+  register: ldapCaCertDir
+
+- name: "make basedir"
+  file: path={{ ldapCaCertDir.stdout }} state=directory owner=root
+  sudo: true
+
 - name: "Copy the CA cert"
   copy: src={{ ldapCaCertSrc }} dest={{ ldapCaCertFile }} owner=root mode=644
   sudo: true
   when: ldapCaCertSrc is defined
 
+- name: "Template CA cert"
+  template: src=ldapCaCert.j2 dest={{ ldapCaCertFile }} owner=root mode=644
+  sudo: true
+  when: ldapCaCertContents is defined
+
+- name: "Copy pam config to ldap client"
+  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth
+  sudo: true
+
 - name: "Copy pam config to ldap client"
-  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth-ac
+  template: src=password-auth.j2 dest=/etc/pam.d/password-auth
   sudo: true
 
 - name: "Copy system auth to ldap client"
   template: src=authconfig.j2 dest=/etc/sysconfig/authconfig
   sudo: true
 
-- name: "Copy ldap.conf file "
-  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
-  sudo: true
+#- name: "Copy ldap.conf file "
+#  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
+#  sudo: true
 
 - name: "Add LDAP server IP address to /etc/hosts"
   lineinfile: dest=/etc/hosts line="{{ ldapServerHostIpLine }}" state=present insertafter=EOF
@@ -33,4 +52,8 @@
   sudo: true
   notify: restart sssd
 
+- name: "start sssd"
+  service: name=sssd state=started
+  sudo: true
+
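The "get cert dir" task above shells out to dirname on the control machine just to compute the certificate's parent directory. Jinja's dirname filter yields the same path without the extra task and register; a sketch, assuming the Ansible version in use ships that filter:

    - name: "make basedir"
      file: path={{ ldapCaCertFile | dirname }} state=directory owner=root
      sudo: true
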
 
diff --git a/roles/openLdapClient/tasks/installOpenLdap.yml b/roles/openLdapClient/tasks/installOpenLdap.yml
index 29f085f..659a86d 100644
--- a/roles/openLdapClient/tasks/installOpenLdap.yml
+++ b/roles/openLdapClient/tasks/installOpenLdap.yml
@@ -2,15 +2,15 @@
 - name: "Install open ldap package yum"
   action: yum pkg={{ item }} state=installed 
   with_items:
-    - openldap
-    - openldap-clients
+    # - openldap
+    # - openldap-clients
     - sssd
     - sssd-common
     - sssd-client
     - nss
     - nss-tools
-    - nss-pam-ldapd
-    - pam_ldap
+    # - nss-pam-ldapd
+    # - pam_ldap
   sudo: true
   when: ansible_os_family == 'RedHat'
 
diff --git a/roles/openLdapClient/templates/authconfig.j2 b/roles/openLdapClient/templates/authconfig.j2
index 20c2b7f..de5a087 100644
--- a/roles/openLdapClient/templates/authconfig.j2
+++ b/roles/openLdapClient/templates/authconfig.j2
@@ -2,7 +2,7 @@ IPADOMAINJOINED=no
 USEMKHOMEDIR=no
 USEPAMACCESS=no
 CACHECREDENTIALS=yes
-USESSSDAUTH=no
+USESSSDAUTH=yes
 USESHADOW=yes
 USEWINBIND=no
 USEDB=no
@@ -10,7 +10,7 @@ FORCELEGACY=no
 USEFPRINTD=yes
 FORCESMARTCARD=no
 PASSWDALGORITHM=sha512
-USELDAPAUTH=yes
+USELDAPAUTH=no
 USEPASSWDQC=no
 IPAV2NONTP=no
 USELOCAUTHORIZE=yes
@@ -18,9 +18,9 @@ USECRACKLIB=yes
 USEIPAV2=no
 USEWINBINDAUTH=no
 USESMARTCARD=no
-USELDAP=yes
+USELDAP=no
 USENIS=no
 USEKERBEROS=no
 USESYSNETAUTH=no
-USESSSD=no
+USESSSD=yes
 USEHESIOD=no
diff --git a/roles/openLdapClient/templates/ldapCaCert.j2 b/roles/openLdapClient/templates/ldapCaCert.j2
new file mode 100644
index 0000000..35383b2
--- /dev/null
+++ b/roles/openLdapClient/templates/ldapCaCert.j2
@@ -0,0 +1 @@
+{{ ldapCaCertContents }}
diff --git a/roles/openLdapClient/templates/password-auth.j2 b/roles/openLdapClient/templates/password-auth.j2
new file mode 100644
index 0000000..b849fde
--- /dev/null
+++ b/roles/openLdapClient/templates/password-auth.j2
@@ -0,0 +1,25 @@
+# This file is auto-generated.
+# User changes will be destroyed the next time authconfig is run.
+auth        required      pam_env.so
+auth        sufficient    pam_unix.so nullok try_first_pass
+auth        requisite     pam_succeed_if.so uid >= 500 quiet
+auth        sufficient    pam_sss.so use_first_pass
+auth        required      pam_deny.so
+
+account     required      pam_unix.so
+account     sufficient    pam_localuser.so
+account     sufficient    pam_succeed_if.so uid < 500 quiet
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
+account     required      pam_permit.so
+
+password    requisite     pam_cracklib.so try_first_pass retry=3
+password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
+password    sufficient    pam_sss.so use_authtok
+password    required      pam_deny.so
+
+session     optional      pam_keyinit.so revoke
+session     required      pam_limits.so
+session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
+session     required      pam_unix.so
+session     optional      pam_sss.so
+
diff --git a/roles/openLdapClient/templates/sssd.j2 b/roles/openLdapClient/templates/sssd.j2
index 9b7f8db..05c9acf 100644
--- a/roles/openLdapClient/templates/sssd.j2
+++ b/roles/openLdapClient/templates/sssd.j2
@@ -27,6 +27,7 @@ ldap_tls_cacert = {{ ldapCaCertFile }}
 ldap_default_bind_dn = {{ ldapBindDN }} 
 ldap_default_authtok_type = password
 ldap_default_authtok = {{ ldapBindDNPassword }} 
+ldap_access_filter = {{ ldap_access_filter }}
 
 {{ ldapRfc2307 }}
 
diff --git a/roles/openLdapClient/templates/system-auth-ac.j2 b/roles/openLdapClient/templates/system-auth-ac.j2
index 4c96e49..2f9036e 100644
--- a/roles/openLdapClient/templates/system-auth-ac.j2
+++ b/roles/openLdapClient/templates/system-auth-ac.j2
@@ -4,21 +4,21 @@
 auth        required      pam_env.so
 auth        sufficient    pam_unix.so nullok try_first_pass
 auth        requisite     pam_succeed_if.so uid >= 500 quiet
-auth        sufficient    pam_ldap.so use_first_pass
+auth        sufficient    pam_sss.so use_first_pass
 auth        required      pam_deny.so
 
 account     required      pam_unix.so broken_shadow
 account     sufficient    pam_succeed_if.so uid < 500 quiet
-account     [default=bad success=ok user_unknown=ignore] pam_ldap.so
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
 account     required      pam_permit.so
 
 password    requisite     pam_cracklib.so try_first_pass retry=3
 password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
-password    sufficient    pam_ldap.so use_authtok
+password    sufficient    pam_sss.so use_authtok
 password    required      pam_deny.so
 
 session     optional      pam_keyinit.so revoke
 session     required      pam_limits.so
 session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
 session     required      pam_unix.so
-session     optional      pam_ldap.so
+session     optional      pam_sss.so
diff --git a/roles/slurm/tasks/main.yml b/roles/slurm/tasks/main.yml
index d351e46..b14c740 100644
--- a/roles/slurm/tasks/main.yml
+++ b/roles/slurm/tasks/main.yml
@@ -16,11 +16,11 @@
   sudo: true
 
 - name: create slurm group
-  group: name=slurm
+  group: name=slurm system=yes
   sudo: true
 
 - name: create slurm user
-  user: name=slurm group=slurm
+  user: name=slurm group=slurm system=yes
   sudo: true
 
 - name: install slurm rpms
diff --git a/roles/ssh-password-login/handlers/main.yml b/roles/ssh-password-login/handlers/main.yml
new file mode 100644
index 0000000..7141e8e
--- /dev/null
+++ b/roles/ssh-password-login/handlers/main.yml
@@ -0,0 +1,3 @@
+- name: "restart sshd"
+  service: name=sshd state=restarted
+  sudo: true
diff --git a/roles/ssh-password-login/tasks/main.yml b/roles/ssh-password-login/tasks/main.yml
new file mode 100644
index 0000000..9ea2baa
--- /dev/null
+++ b/roles/ssh-password-login/tasks/main.yml
@@ -0,0 +1,20 @@
+- name: "Enable Challenge Response"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "ChallengeResponseAuthentication no"
+    line: "ChallengeResponseAuthentication yes" 
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
+- name: "Enable Challenge Response"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "PasswordAuthentication no"
+    line: "PasswordAuthentication yes"
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
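Because both tasks set backrefs: yes, lineinfile rewrites a line only when the regexp matches and never appends, so an sshd_config carrying the directive commented out (or missing entirely) is left untouched. A sketch that also matches the commented-out form and, by dropping backrefs, appends the line when absent (an assumption, not part of this patch):

    - name: "Enable Password Authentication"
      lineinfile:
      args:
        dest: /etc/ssh/sshd_config
        regexp: "^#?PasswordAuthentication"
        line: "PasswordAuthentication yes"
      sudo: true
      notify: restart sshd
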
diff --git a/roles/strudel_config/tasks/main.yml b/roles/strudel_config/tasks/main.yml
new file mode 100644
index 0000000..b7bec42
--- /dev/null
+++ b/roles/strudel_config/tasks/main.yml
@@ -0,0 +1,8 @@
+- name: "Set login node"
+  set_fact: 
+    loginNode: "{{ ansible_eth0.ipv4.address }}"
+
+- name: "Temlate Strudel config"
+  template: src=generic_slurm_config.json.j2 dest=/tmp/Strudel_Desktops.json
+  delegate_to: 127.0.0.1
+  run_once: True
diff --git a/roles/strudel_config/templates/generic_slurm_config.json.j2 b/roles/strudel_config/templates/generic_slurm_config.json.j2
new file mode 100644
index 0000000..3acb443
--- /dev/null
+++ b/roles/strudel_config/templates/generic_slurm_config.json.j2
@@ -0,0 +1,452 @@
+[
+    [
+        "GenericDesktops"
+    ],
+    {
+        "GenericDesktops": {
+            "__class__": "siteConfig",
+            "__module__": "siteConfig",
+            "agent": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -l {username} {execHost} \"echo agent_hello; bash \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "agent_hello"
+                ],
+                "requireMatch": true
+            },
+            "authURL": null,
+            "authorizedKeysFile": null,
+            "dbusSessionBusAddress": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DISPLAY={vncDisplay};timeout 15 /usr/local/bin/cat_dbus_session_file.sh'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^DBUS_SESSION_BUS_ADDRESS=(?P<dbusSessionBusAddress>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "defaults": {
+                "jobParams_hours": 48,
+                "jobParams_mem": 4,
+                "jobParams_ppn": 1
+            },
+            "directConnect": true,
+            "displayStrings": {
+                "__class__": "sshKeyDistDisplayStrings",
+                "__module__": "siteConfig",
+                "createNewKeyDialogNewPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.",
+                "createNewKeyDialogNewPassphraseMismatch": "Passphrases don't match!",
+                "createNewKeyDialogNewPassphraseTooShort": "Passphrase is too short.",
+                "helpEmailAddress": "help@massive.org.au",
+                "networkError": "It looks like a network error has occured. You may be able to resume your work by logging in again.",
+                "newPassphrase": "It looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseMismatch": "Sorry, the two passphrases you entered don't match.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseTitle": "Please enter a new passphrase",
+                "newPassphraseTooShort": "Sorry, the passphrase must be at least six characters.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key. \nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "passphrasePrompt": "Please enter the passphrase for your SSH key",
+                "passphrasePromptIncorrect": "Sorry, that passphrase was incorrect.\nPlease enter the passphrase for you SSH Key\nIf you have forgoten the passphrase for you key, you may need to delete it and create a new key.\nYou can find this option under the Identity menu.\n",
+                "passphrasePromptIncorrectl": "Sorry, that passphrase was incorrect. Please enter the passphrase for your ssh key",
+                "passwdPrompt": "Please enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "passwdPromptIncorrect": "Sorry, that password was incorrect.\nPlease enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "persistentMessage": "Would you like to leave your current session running so that you can reconnect later?",
+                "persistentMessagePersist": "Leave it running",
+                "persistentMessageStop": "Stop the desktop",
+                "qdelQueuedJob": "It looks like you've been waiting for a job to start.\nDo you want me to delete the job or leave it in the queue so you can reconnect later?\n",
+                "qdelQueuedJobNOOP": "Leave it in the queue (I'll reconnect later)",
+                "qdelQueuedJobQdel": "Delete the job",
+                "reconnectMessage": "An Existing Desktop was found. Would you like to reconnect or kill it and start a new desktop?",
+                "reconnectMessageNo": "New desktop",
+                "reconnectMessageYes": "Reconnect",
+                "temporaryKey": "\nWould you like to use the launchers old behaviour (entering a password every time you start a new desktop) or try the new behaviour (creating an ssh key pair and entering a passphrase the first time you use the launcher after reboot.)\n\nPasswords are recomended if this is a shared user account.\n\nSSH Keys are recommended if you are the only person who uses this account.\n\nThis option can be changed from the Identity menu.\n",
+                "temporaryKeyNo": "Use my SSH Key",
+                "temporaryKeyYes": "Use my password every time"
+            },
+            "displayWebDavInfoDialogOnRemoteDesktop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'echo -e \\\"You can access your local home directory in Nautilus File Browser, using the location:\\n\\ndav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\n\\nYour one-time password is {vncPasswd}\\\" > ~/.vnc/\\$(hostname){vncDisplay}-webdav.txt'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "execHost": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<execHost>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "getProjects": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"groups | sed 's@ @\\n@g'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^\\s*(?P<group>\\S+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "imageid": null,
+            "instanceFlavour": null,
+            "listAll": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "(?P<jobid>(?P<jobidNumber>[0-9]+)) (?P<remainingWalltime>.*)$"
+                ],
+                "requireMatch": false
+            },
+            "loginHost": "{{ loginNode }}",
+            "messageRegexs": [
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^INFO:(?P<info>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^WARN:(?P<warn>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^ERROR:(?P<error>.*(?:\n|\r\n?))"
+                }
+            ],
+            "onConnectScript": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'module load keyutility ; mountUtility.py'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "openWebDavShareInRemoteFileBrowser": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};/usr/bin/gconftool-2 --type=Boolean --set /apps/nautilus/preferences/always_use_location_entry true {ampersand}{ampersand} DISPLAY={vncDisplay} xdg-open dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "otp": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "'cat ~/.vnc/clearpass'",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<vncPasswd>\\S+)$"
+                ],
+                "requireMatch": true
+            },
+            "provision": null,
+            "relabel": {},
+            "runSanityCheck": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "running": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scontrol show job {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "JobState=RUNNING"
+                ],
+                "requireMatch": true
+            },
+            "setDisplayResolution": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "showStart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "siteRanges": {
+                "jobParams_hours": [
+                    1,
+                    336
+                ],
+                "jobParams_mem": [
+                    1,
+                    1024
+                ],
+                "jobParams_nodes": [
+                    1,
+                    10
+                ],
+                "jobParams_ppn": [
+                    1,
+                    12
+                ]
+            },
+            "startServer": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; module load turbovnc ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\n/usr/local/bin/vncsession --vnc turbovnc --geometry {resolution} ; sleep 36000000 ' |  sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^Submitted batch job (?P<jobid>(?P<jobidNumber>[0-9]+))$"
+                ],
+                "requireMatch": true
+            },
+            "stop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "stopForRestart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "tunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -L {localPortNumber}:localhost:{remotePortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "tunnel_hello"
+                ],
+                "requireMatch": true
+            },
+            "username": null,
+            "visibility": {
+                "advancedCheckBoxPanel": true,
+                "cipherPanel": "Advanced",
+                "debugCheckBoxPanel": "Advanced",
+                "jobParams_hours": true,
+                "jobParams_nodes": true,
+                "jobParams_ppn": true,
+                "label_hours": true,
+                "label_nodes": true,
+                "label_ppn": true,
+                "resolutionPanel": "Advanced",
+                "resourcePanel": "Advanced",
+                "usernamePanel": true
+            },
+            "vncDisplay": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"cat .vnc/slurm-{jobidNumber}.out\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^.*?started on display \\S+(?P<vncDisplay>:[0-9]+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "webDavCloseWindow": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay}; wmctrl -F -i -c {webDavWindowID}'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "webDavIntermediatePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<intermediateWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavMount": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};echo \\\\\\\"import pexpect;child = pexpect.spawn('gvfs-mount dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}');child.expect('Password: ');child.sendline('{vncPasswd}');child.expect(pexpect.EOF);child.close();print 'gvfs-mount returned ' + str(child.exitstatus)\\\\\\\" {pipe} python\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^gvfs-mount returned (?P<webDavMountingExitCode>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavRemotePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<remoteWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavTunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -oExitOnForwardFailure=yes -R {remoteWebDavPortNumber}:localhost:{localWebDavPortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "tunnel_hello"
+                ],
+                "requireMatch": true
+            },
+            "webDavUnmount": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay};timeout 1 gvfs-mount --unmount-scheme dav'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "webDavWindowID": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress}; DISPLAY={vncDisplay} xwininfo -root -tree'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^\\s+(?P<webDavWindowID>\\S+)\\s+\"{homeDirectoryWebDavShareName}.*Browser.*$"
+                ],
+                "requireMatch": true
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/roles/syncExports/tasks/addExports.yml b/roles/syncExports/tasks/addExports.yml
index d3723e7..0ea7e7c 100644
--- a/roles/syncExports/tasks/addExports.yml
+++ b/roles/syncExports/tasks/addExports.yml
@@ -4,4 +4,17 @@
   delegate_to: "{{ nfs_server }}"
   run_once: true
   sudo: true
-  notify: "Reload exports"
+
+# Do not do this as a handler; run it here as a task so that it happens immediately
+# after the exports file is created, before any clients attempt a mount.
+- name : "Reload exports"
+  command: exportfs -ra
+  delegate_to: "{{ nfs_server }}"
+  run_once: true
+  sudo: true
+
+- name : "Pause ... clients sometimes have errors"
+  command: sleep 60
+  delegate_to: "{{ nfs_server }}"
+  run_once: true
+  sudo: true
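
An alternative that keeps "Reload exports" as a handler but still runs it before any client attempts a mount is to flush handlers explicitly right after the notifying task; a sketch, not used by this patch:

    # Placed immediately after the task that notifies "Reload exports"
    - meta: flush_handlers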
diff --git a/roles/vncserver/tasks/main.yml b/roles/vncserver/tasks/main.yml
index b4df688..1509b03 100644
--- a/roles/vncserver/tasks/main.yml
+++ b/roles/vncserver/tasks/main.yml
@@ -1,5 +1,6 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml"
+#- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_architecture }}.yml"
 
 - name: add repos apt
   shell: "add-apt-repository -y 'deb {{ item }} {{ ansible_distribution_release }} main' "
-- 
GitLab