diff --git a/dynamicInventory b/dynamicInventory
index bfb6b4b9bf9c00dcda7ca7d65b6a51391219e04e..cd93e153702649a30dda0a207edce50e21fa4740 100755
--- a/dynamicInventory
+++ b/dynamicInventory
@@ -3,8 +3,13 @@
 import sys, os, string, subprocess, socket, re
 import copy, shlex,uuid, random, multiprocessing, time, shutil, json
 import novaclient.v1_1.client as nvclient
 import novaclient.exceptions as nvexceptions
+from keystoneclient.auth.identity import v2 as v2_auth
+from heatclient import client as heat_client
-class Authenticate:
+from keystoneclient import session as kssession
+
+
+class OpenStackConnection:
 
     def __init__(self, username, passwd):
         self.username=username
@@ -12,39 +17,188 @@
         self.tenantName= os.environ['OS_TENANT_NAME']
         self.tenantID= os.environ['OS_TENANT_ID']
         self.authUrl="https://keystone.rc.nectar.org.au:5000/v2.0"
-
-    def gatherInfo(self):
-        ## Fetch the Nova Object
+    def _get_keystone_v2_auth(self, v2_auth_url, **kwargs):
+        auth_token = kwargs.pop('auth_token', None)
+        tenant_id = kwargs.pop('project_id', None)
+        tenant_name = kwargs.pop('project_name', None)
+        if auth_token:
+            return v2_auth.Token(v2_auth_url, auth_token,
+                                 tenant_id=tenant_id,
+                                 tenant_name=tenant_name)
+        else:
+            return v2_auth.Password(v2_auth_url,
+                                    username=kwargs.pop('username', None),
+                                    password=kwargs.pop('password', None),
+                                    tenant_id=tenant_id,
+                                    tenant_name=tenant_name)
+
+
+    def _get_keystone_session(self, **kwargs):
+        # first create a Keystone session
+        cacert = kwargs.pop('cacert', None)
+        cert = kwargs.pop('cert', None)
+        key = kwargs.pop('key', None)
+        insecure = kwargs.pop('insecure', False)
+        timeout = kwargs.pop('timeout', None)
+        verify = kwargs.pop('verify', None)
+
+        # FIXME(gyee): this code should come from keystoneclient
+        if verify is None:
+            if insecure:
+                verify = False
+            else:
+                # TODO(gyee): should we do
+                # heatclient.common.http.get_system_ca_file()?
+                verify = cacert or True
+        if cert and key:
+            # passing cert and key together is deprecated in favour of the
+            # requests lib form of having the cert and key as a tuple
+            cert = (cert, key)
+        return kssession.Session(verify=verify, cert=cert, timeout=timeout)
+
+    def _get_keystone_auth(self, session, auth_url, **kwargs):
+        # FIXME(dhu): this code should come from keystoneclient
+
+        # discover the supported keystone versions using the given url
+        v2_auth_url = auth_url
+        v3_auth_url = None
+
+        # Determine which authentication plugin to use. First inspect the
+        # auth_url to see the supported version. If both v3 and v2 are
+        # supported, then use the highest version if possible.
+        auth = None
+        if v3_auth_url and v2_auth_url:
+            user_domain_name = kwargs.get('user_domain_name', None)
+            user_domain_id = kwargs.get('user_domain_id', None)
+            project_domain_name = kwargs.get('project_domain_name', None)
+            project_domain_id = kwargs.get('project_domain_id', None)
+
+            # support both v2 and v3 auth. Use v3 if domain information is
+            # provided.
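+            # NOTE: v3_auth_url is hard-coded to None above and this script
+            # defines no _get_keystone_v3_auth helper, so in practice only
+            # the v2 branch below is ever taken.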
+            if (user_domain_name or user_domain_id or project_domain_name or
+                    project_domain_id):
+                auth = self._get_keystone_v3_auth(v3_auth_url, **kwargs)
+            else:
+                auth = self._get_keystone_v2_auth(v2_auth_url, **kwargs)
+        elif v3_auth_url:
+            # support only v3
+            auth = self._get_keystone_v3_auth(v3_auth_url, **kwargs)
+        elif v2_auth_url:
+            # support only v2
+            auth = self._get_keystone_v2_auth(v2_auth_url, **kwargs)
+        else:
+            raise Exception('Unable to determine the Keystone version '
+                            'to authenticate with using the given auth_url.')
+
+        return auth
+
+    def get_stack_name(self, stack):
+        stacks = []
+        for s in self.hc.stacks.list():
+            stacks.append(s.stack_name)
+        if stack in stacks:
+            return stack
+        elif len(stacks) == 1:
+            return stacks[0]
+        elif len(stacks) == 0:
+            raise Exception("You do not have any heat stacks in your OpenStack Project")
+        else:
+            raise Exception("You have multiple heat stacks in your OpenStack Project and I'm not sure which one to use.\nYou can select a stack by symlinking this script to the stack name, for example if you have a stack called mycluster do ln -s %s mycluster\n" % stack)
 
-        nc = nvclient.Client( auth_url=self.authUrl,
+    def auth(self):
+        self.nc = nvclient.Client( auth_url=self.authUrl,
                         username=self.username,
                         api_key=self.passwd,
                         project_id=self.tenantName,
                         tenant_id=self.tenantID,
                         service_type="compute"
                         )
+        kwargs = {
+            'insecure': False,
+        }
+        keystone_session = self._get_keystone_session(**kwargs)
+
+        kwargs = {
+            'username': self.username,
+            'password': self.passwd,
+            'project_id': self.tenantID,
+            'project_name': self.tenantName
+        }
+
+        keystone_auth = self._get_keystone_auth(keystone_session,
+                                                self.authUrl,
+                                                **kwargs)
+
+        endpoint = keystone_auth.get_endpoint(keystone_session,
+                                              service_type='orchestration',
+                                              region_name=None)
+
+        kwargs = {
+            'username': self.username,
+            'include_pass': False,
+            'session': keystone_session,
+            'auth_url': self.authUrl,
+            'region_name': '',
+            'endpoint_type': 'publicURL',
+            'service_type': 'orchestration',
+            'password': self.passwd,
+            'auth': keystone_auth,
+        }
+        api_version = 1
+
+        self.hc = heat_client.Client(api_version, endpoint, **kwargs)
+
+
+    def recurse_resources(self, stack, resource):
+        result = []
+        if 'OS::Nova::Server' in resource.resource_type:
+            result.append(resource.physical_resource_id)
+        if 'OS::Heat::ResourceGroup' in resource.resource_type:
+            for r in self.hc.resources.list(resource.physical_resource_id):
+                result.extend(self.recurse_resources(stack, r))
+
+        return result
+
+    def gatherInfo(self, stack_name):
+
+        ## Fetch the Nova Object
+        instance_ids = []
+        for i in self.hc.stacks.list():
+            if i.stack_name == stack_name:
+                for r in self.hc.resources.list(i.stack_name):
+                    instance_ids.extend(self.recurse_resources(stack=i, resource=r))
+
+        nc = self.nc
         inventory = {}
         inventory['_meta'] = { 'hostvars': {} }
         for server in nc.servers.list():
-            if server.metadata:
-                hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
-                # Set Ansible Host Group
-                if server.metadata['ansible_host_group'] in inventory:
-                    inventory[server.metadata['ansible_host_group']].append(hostname)
-                else:
-                    inventory[server.metadata['ansible_host_group']] = [hostname]
-                # Set the other host variables
-                inventory['_meta']['hostvars'][hostname] = {}
-                inventory['_meta']['hostvars'][hostname]['ansible_ssh_user'] = server.metadata['ansible_ssh_user']
-                inventory['_meta']['hostvars'][hostname]['ansible_ssh_private_key_file'] = server.metadata['ansible_ssh_private_key_file']
-
-            else:
-                continue
+            if server.id in instance_ids:
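+                # only inventory servers that belong to the selected heat
+                # stack; everything else in the tenant is skipped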
+                if server.metadata and 'ansible_host_group' in server.metadata:
+                    #hostname = socket.gethostbyaddr(server.networks.values()[0][0])[0]
+                    hostname = server.name
+                    # Set Ansible Host Group
+                    if server.metadata['ansible_host_group'] in inventory:
+                        inventory[server.metadata['ansible_host_group']].append(hostname)
+                    else:
+                        inventory[server.metadata['ansible_host_group']] = [hostname]
+                    # Set the other host variables
+                    inventory['_meta']['hostvars'][hostname] = {}
+                    inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = server.networks.values()[0][0]
+                    inventory['_meta']['hostvars'][hostname]['ansible_remote_tmp'] = '/tmp/ansible'
+                    for key in server.metadata.keys():
+                        if 'ansible_ssh' in key:
+                            inventory['_meta']['hostvars'][hostname][key] = server.metadata[key]
+
+            else:
+                continue
 
         print json.dumps(inventory)
 
 if __name__ == "__main__":
+    stack_name = os.path.basename(sys.argv[0])
     username = os.environ['OS_USERNAME']
     passwd = os.environ['OS_PASSWORD']
-    auth = Authenticate(username, passwd)
-    auth.gatherInfo()
+    openstack = OpenStackConnection(username, passwd)
+    openstack.auth()
+    stack_name = openstack.get_stack_name(stack_name)
+    openstack.gatherInfo(stack_name)
diff --git a/roles/dump_ldap_config/tasks/main.yml b/roles/dump_ldap_config/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7e779f14f3d129fdba8342a9a85981018d7407d4
--- /dev/null
+++ b/roles/dump_ldap_config/tasks/main.yml
@@ -0,0 +1,11 @@
+---
+- name: grab cacert
+  shell: cat /etc/openldap/certs/cacert.pem
+  register: ldapCaCertContents
+
+- name: dump vars
+  template: src=ldapConfig.j2 dest=/tmp/ldapConfig.out
+
+- name: fetch vars
+  fetch: src=/tmp/ldapConfig.out dest=/tmp/ldapConfig.out flat=yes
+
diff --git a/roles/dump_ldap_config/templates/ldapConfig.j2 b/roles/dump_ldap_config/templates/ldapConfig.j2
new file mode 100644
index 0000000000000000000000000000000000000000..158e144f17a0dacb02d425206d5d87dccd4d72c5
--- /dev/null
+++ b/roles/dump_ldap_config/templates/ldapConfig.j2
@@ -0,0 +1,15 @@
+---
+ldapServerHostIpLine: "{{ ansible_eth0.ipv4.address }} {{ ansible_fqdn }}"
+ldapCaCertContents: |
+{% for l in ldapCaCertContents.stdout_lines %}
+  {{ l }}
+{% endfor %}
+ldapCaCertFile: /etc/ssl/certs/cacert.crt
+ldapDomain: "{{ ldapDomain }}"
+ldapURI: "ldaps://{{ ansible_fqdn }}:636"
+ldapBindDN: "{{ ldapBindDN }}"
+ldapBindDNPassword: "{{ ldapBindDNPassword }}"
+ldapBase: "{{ ldapBase }}"
+ldapGroupBase: "{{ ldapGroupBase }}"
+ldapRfc2307Pam: ""
+ldap_access_filter: "(objectClass=posixAccount)"
diff --git a/roles/easy-rsa-certificate/tasks/buildCert.yml b/roles/easy-rsa-certificate/tasks/buildCert.yml
index c9b2f9cdd52434a9159d07e3c7175d20221f2c7d..32f5a06f62d3461e0e9f63a2c6c1fa0a55c09c76 100644
--- a/roles/easy-rsa-certificate/tasks/buildCert.yml
+++ b/roles/easy-rsa-certificate/tasks/buildCert.yml
@@ -2,6 +2,7 @@
 - name: "Check client ca certificate"
   register: ca_cert
   stat: "path={{ x509_cacert_file }}"
+  sudo: true
 
 - name: "Check certificate and key"
   shell: (openssl x509 -noout -modulus -in {{ x509_cert_file }} | openssl md5 ; openssl rsa -noout -modulus -in {{ x509_key_file }} | openssl md5) | uniq | wc -l
@@ -46,7 +47,7 @@
   when: needcert
 
 - name: "Creating CSR"
-  shell: " cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
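+  # note: pkitool --csr generates the private key and certificate request;
+  # with the stock easy-rsa 2.0 vars file they land in /etc/easy-rsa/2.0/keys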
+  shell: "cd /etc/easy-rsa/2.0; . ./vars; export EASY_RSA=\"${EASY_RSA:-.}\"; \"$EASY_RSA\"/pkitool --csr {{ x509_csr_args }} {{ x509_common_name }}"
   when: needcert
   sudo: true
 
diff --git a/roles/easy-rsa-common/tasks/installEasyRsa.yml b/roles/easy-rsa-common/tasks/installEasyRsa.yml
index af050cf98aac4afc8b9279cda6915f73dd0b14f9..e66e88a25ef98ac472e9054b2f0c45db7ce44f92 100644
--- a/roles/easy-rsa-common/tasks/installEasyRsa.yml
+++ b/roles/easy-rsa-common/tasks/installEasyRsa.yml
@@ -6,7 +6,7 @@
   when: ansible_os_family == 'RedHat'
 
 - name: "Installing easy-rsa"
-  apt: "name=openvpn state=present"
+  apt: "name=openvpn state=present update_cache=yes"
   sudo: True
   when: ansible_os_family == 'Debian'
 -
diff --git a/roles/easy-rsa-common/tasks/main.yml b/roles/easy-rsa-common/tasks/main.yml
index 39760c7a8eb9bd142666798adf73dce0a4706b37..619f8806e8dadd278023cf1dc38f7160f5a10e7e 100644
--- a/roles/easy-rsa-common/tasks/main.yml
+++ b/roles/easy-rsa-common/tasks/main.yml
@@ -3,6 +3,3 @@
  include: installEasyRsa.yml
 -
  include: copyConfigurationFile.yml
--
- include: yumList.yml
-
diff --git a/roles/karaage2.7/meta/main.yml b/roles/karaage2.7/meta/main.yml
index f6f4f5b9a630b628a2bd4c77cd16f924f8c90434..11e79807cceb222f4d145464d2f0db20a1aefec7 100644
--- a/roles/karaage2.7/meta/main.yml
+++ b/roles/karaage2.7/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-  - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+  - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/karaage2.7/tasks/main.yml b/roles/karaage2.7/tasks/main.yml
index fe3197637d6756bbead854ed6806e26fa78ee0a7..279b5d5250b99d667a2b51ec9b1e4f881760343a 100644
--- a/roles/karaage2.7/tasks/main.yml
+++ b/roles/karaage2.7/tasks/main.yml
@@ -1,5 +1,5 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/ldapserver/meta/main.yml b/roles/ldapserver/meta/main.yml
index 46f5a2316b48320534f9e99db594e1bb61d34744..11e79807cceb222f4d145464d2f0db20a1aefec7 100644
--- a/roles/ldapserver/meta/main.yml
+++ b/roles/ldapserver/meta/main.yml
@@ -1,3 +1,3 @@
 ---
 dependencies:
-  - { role: easy-rsa-certificate, x509_csr_args: "--server" }
+  - { role: easy-rsa-certificate, x509_csr_args: "", x509_sign_args: "--server", x509_cacert_file: "/etc/ssl/certs/ca.crt", x509_key_file: "/etc/ssl/private/server.key", x509_cert_file: "/etc/ssl/certs/server.crt", x509_common_name: "{{ ansible_fqdn }}" }
diff --git a/roles/ldapserver/tasks/main.yml b/roles/ldapserver/tasks/main.yml
index a5ec4d6992069cba7b5dcc69e2d51c57829d9e29..41631b452680e59475e3bd65229f7d23123094cf 100644
--- a/roles/ldapserver/tasks/main.yml
+++ b/roles/ldapserver/tasks/main.yml
@@ -1,6 +1,6 @@
 ---
-- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ hostvars[ansible_hostname]['ansible_distribution_version'] }}_{{ ansible_architecture }}.yml"
+- include_vars: "{{ ansible_distribution }}_{{ ansible_distribution_version }}_{{ ansible_architecture }}.yml"
 
 - name: install system packages apt
   apt: name={{ item }} state=installed update_cache=true
diff --git a/roles/nfs-client/tasks/mountFileSystem.yml b/roles/nfs-client/tasks/mountFileSystem.yml
index efa0b51501bdd91a5c373f9420599e4b16d2c0c4..1f7a8d17fb2d31c5cb1e3a10d0cd6e1bfcaacaf5 100644
--- a/roles/nfs-client/tasks/mountFileSystem.yml
+++ b/roles/nfs-client/tasks/mountFileSystem.yml
@@ -1,8 +1,39 @@
 ---
+- name: "stop fail2ban"
+  service: name=fail2ban state=stopped
+  sudo: true
+
+- name: restart idmap
+  service: name={{ item }} state=restarted
+  with_items:
+    - rpcbind
+    - rpcidmapd
+  sudo: true
+
 - name: "Mounting NFS mounts"
-  mount: name={{ item.name }} src={{ hostvars[nfs_server]['ansible_'+item.interface]['ipv4']['address'] }}:{{ item.src }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
   with_items: exportList
   notify: "restart authentication"
   notify: "restart idmap"
   sudo: true
+  ignore_errors: true
+  register: firstMount
   when: exportList is defined
+
+- name: "Wait for nfs to stabilise"
+  command: sleep 60
+  delegate_to: 127.0.0.1
+  when: firstMount | failed
+
+- name: "Mounting NFS mounts"
+  mount: name={{ item.src }} src={{ item.ipv4 }}:{{ item.name }} fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
+  with_items: exportList
+  notify:
+    - "restart authentication"
+    - "restart idmap"
+  sudo: true
+  when: exportList is defined and firstMount | failed
+
+- name: "restart fail2ban"
+  service: name=fail2ban state=started
+  sudo: true
+
diff --git a/roles/openLdapClient/tasks/configLdapClient.yml b/roles/openLdapClient/tasks/configLdapClient.yml
index 21aa7d2501596bc4a864cc3abb77d39da32f0045..b55b502410dfecc2def3bdc83898265f623c37c8 100644
--- a/roles/openLdapClient/tasks/configLdapClient.yml
+++ b/roles/openLdapClient/tasks/configLdapClient.yml
@@ -6,22 +6,41 @@
     - nsswitch.conf
   sudo: true
 
+- name: "get cert dir"
+  shell: "dirname {{ ldapCaCertFile }}"
+  delegate_to: localhost
+  run_once: true
+  register: ldapCaCertDir
+
+- name: "make basedir"
+  file: path={{ ldapCaCertDir.stdout }} state=directory owner=root
+  sudo: true
+
 - name: "Copy the CA cert"
   copy: src={{ ldapCaCertSrc }} dest={{ ldapCaCertFile }} owner=root mode=644
   sudo: true
   when: ldapCaCertSrc is defined
 
+- name: "Template CA cert"
+  template: src=ldapCaCert.j2 dest={{ ldapCaCertFile }} owner=root mode=644
+  sudo: true
+  when: ldapCaCertContents is defined
+
+- name: "Copy pam config to ldap client"
+  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth
+  sudo: true
+
 - name: "Copy pam config to ldap client"
-  template: src=system-auth-ac.j2 dest=/etc/pam.d/system-auth-ac
+  template: src=password-auth.j2 dest=/etc/pam.d/password-auth
   sudo: true
 
 - name: "Copy system auth to ldap client"
   template: src=authconfig.j2 dest=/etc/sysconfig/authconfig
   sudo: true
 
-- name: "Copy ldap.conf file "
-  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
-  sudo: true
+#- name: "Copy ldap.conf file "
+#  template: src=ldap.conf.j2 dest=/etc/openldap/ldap.conf
+#  sudo: true
 
 - name: "Add LDAP server IP address to /etc/hosts"
   lineinfile: dest=/etc/hosts line="{{ ldapServerHostIpLine }}" state=present insertafter=EOF
@@ -33,4 +52,8 @@
   sudo: true
   notify: restart sssd
 
+- name: "start sssd"
+  service: name=sssd state=started
+  sudo: true
+
diff --git a/roles/openLdapClient/tasks/installOpenLdap.yml b/roles/openLdapClient/tasks/installOpenLdap.yml
index 29f085f6542e957bad01800f19da712ee3ac34fb..659a86de958d2b883c2feefa98d399be0764d4dd 100644
--- a/roles/openLdapClient/tasks/installOpenLdap.yml
+++ b/roles/openLdapClient/tasks/installOpenLdap.yml
@@ -2,15 +2,15 @@
 - name: "Install open ldap package yum"
   action: yum pkg={{ item }} state=installed
   with_items:
-    - openldap
-    - openldap-clients
+    # - openldap
+    # - openldap-clients
     - sssd
     - sssd-common
     - sssd-client
     - nss
     - nss-tools
-    - nss-pam-ldapd
-    - pam_ldap
+    # - nss-pam-ldapd
+    # - pam_ldap
   sudo: true
   when: ansible_os_family == 'RedHat'
diff --git a/roles/openLdapClient/templates/authconfig.j2 b/roles/openLdapClient/templates/authconfig.j2
index 20c2b7f549afdb64c527f7175b02d0a65d3616a9..de5a087e95f105998e109f452d0a29a9bfe95972 100644
--- a/roles/openLdapClient/templates/authconfig.j2
+++ b/roles/openLdapClient/templates/authconfig.j2
@@ -2,7 +2,7 @@ IPADOMAINJOINED=no
 USEMKHOMEDIR=no
 USEPAMACCESS=no
 CACHECREDENTIALS=yes
-USESSSDAUTH=no
+USESSSDAUTH=yes
 USESHADOW=yes
 USEWINBIND=no
 USEDB=no
@@ -10,7 +10,7 @@ FORCELEGACY=no
 USEFPRINTD=yes
 FORCESMARTCARD=no
 PASSWDALGORITHM=sha512
-USELDAPAUTH=yes
+USELDAPAUTH=no
 USEPASSWDQC=no
 IPAV2NONTP=no
 USELOCAUTHORIZE=yes
@@ -18,9 +18,9 @@ USECRACKLIB=yes
 USEIPAV2=no
 USEWINBINDAUTH=no
 USESMARTCARD=no
-USELDAP=yes
+USELDAP=no
 USENIS=no
 USEKERBEROS=no
 USESYSNETAUTH=no
-USESSSD=no
+USESSSD=yes
 USEHESIOD=no
diff --git a/roles/openLdapClient/templates/ldapCaCert.j2 b/roles/openLdapClient/templates/ldapCaCert.j2
new file mode 100644
index 0000000000000000000000000000000000000000..35383b2f3ea87b0c04750ba1f07344179ad7e630
--- /dev/null
+++ b/roles/openLdapClient/templates/ldapCaCert.j2
@@ -0,0 +1 @@
+{{ ldapCaCertContents }}
diff --git a/roles/openLdapClient/templates/password-auth.j2 b/roles/openLdapClient/templates/password-auth.j2
new file mode 100644
index 0000000000000000000000000000000000000000..b849fdeacc9014fe38215bdf64732ede07597c36
--- /dev/null
+++ b/roles/openLdapClient/templates/password-auth.j2
@@ -0,0 +1,25 @@
+# This file is auto-generated.
+# User changes will be destroyed the next time authconfig is run.
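+#
+# Note: pam_sss.so is what hands LDAP users to sssd here; local accounts
+# (uid < 500) are matched by pam_unix/pam_succeed_if/pam_localuser and
+# never reach sssd.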
+auth        required      pam_env.so
+auth        sufficient    pam_unix.so nullok try_first_pass
+auth        requisite     pam_succeed_if.so uid >= 500 quiet
+auth        sufficient    pam_sss.so use_first_pass
+auth        required      pam_deny.so
+
+account     required      pam_unix.so
+account     sufficient    pam_localuser.so
+account     sufficient    pam_succeed_if.so uid < 500 quiet
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
+account     required      pam_permit.so
+
+password    requisite     pam_cracklib.so try_first_pass retry=3
+password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
+password    sufficient    pam_sss.so use_authtok
+password    required      pam_deny.so
+
+session     optional      pam_keyinit.so revoke
+session     required      pam_limits.so
+session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
+session     required      pam_unix.so
+session     optional      pam_sss.so
+
diff --git a/roles/openLdapClient/templates/sssd.j2 b/roles/openLdapClient/templates/sssd.j2
index 9b7f8dbc8ed538cd0cd37ecde5c78a67bbf28f4e..05c9acf40eb1062dc8255042ff398ca9ea63ae98 100644
--- a/roles/openLdapClient/templates/sssd.j2
+++ b/roles/openLdapClient/templates/sssd.j2
@@ -27,6 +27,7 @@ ldap_tls_cacert = {{ ldapCaCertFile }}
 ldap_default_bind_dn = {{ ldapBindDN }}
 ldap_default_authtok_type = password
 ldap_default_authtok = {{ ldapBindDNPassword }}
+ldap_access_filter = {{ ldap_access_filter }}
 
 {{ ldapRfc2307 }}
 
diff --git a/roles/openLdapClient/templates/system-auth-ac.j2 b/roles/openLdapClient/templates/system-auth-ac.j2
index 4c96e491eb2eff88c8b5e416941bf65d021c7a95..2f9036e24bd3d33b4626af796c1427a168ad3de3 100644
--- a/roles/openLdapClient/templates/system-auth-ac.j2
+++ b/roles/openLdapClient/templates/system-auth-ac.j2
@@ -4,21 +4,21 @@
 auth        required      pam_env.so
 auth        sufficient    pam_unix.so nullok try_first_pass
 auth        requisite     pam_succeed_if.so uid >= 500 quiet
-auth        sufficient    pam_ldap.so use_first_pass
+auth        sufficient    pam_sss.so use_first_pass
 auth        required      pam_deny.so
 
 account     required      pam_unix.so broken_shadow
 account     sufficient    pam_succeed_if.so uid < 500 quiet
-account     [default=bad success=ok user_unknown=ignore] pam_ldap.so
+account     [default=bad success=ok user_unknown=ignore] pam_sss.so
 account     required      pam_permit.so
 
 password    requisite     pam_cracklib.so try_first_pass retry=3
 password    sufficient    pam_unix.so md5 shadow nullok try_first_pass use_authtok
-password    sufficient    pam_ldap.so use_authtok
+password    sufficient    pam_sss.so use_authtok
 password    required      pam_deny.so
 
 session     optional      pam_keyinit.so revoke
 session     required      pam_limits.so
 session     [success=1 default=ignore] pam_succeed_if.so service in crond quiet use_uid
 session     required      pam_unix.so
-session     optional      pam_ldap.so
+session     optional      pam_sss.so
diff --git a/roles/slurm/tasks/main.yml b/roles/slurm/tasks/main.yml
index 2706ec975ad072000d05017bd838d17f4bf9b6a2..8b7824c623cfd78ec2f1c44465b87ccabb62a2e6 100644
--- a/roles/slurm/tasks/main.yml
+++ b/roles/slurm/tasks/main.yml
@@ -16,11 +16,11 @@
   sudo: true
 
 - name: create slurm group
-  group: name=slurm
+  group: name=slurm system=yes
   sudo: true
 
 - name: create slurm user
-  user: name=slurm group=slurm createhome=no
+  user: name=slurm group=slurm system=yes createhome=no
   sudo: true
 
 - name: install slurm rpms
diff --git a/roles/ssh-password-login/handlers/main.yml b/roles/ssh-password-login/handlers/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7141e8ee5afee44b78216875e7dc097d679d9661
--- /dev/null
+++ b/roles/ssh-password-login/handlers/main.yml
@@ -0,0 +1,3 @@
+- name: "restart sshd"
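+  # notified by the sshd_config edits in roles/ssh-password-login/tasks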
+  service: name=sshd state=restarted
+  sudo: true
diff --git a/roles/ssh-password-login/tasks/main.yml b/roles/ssh-password-login/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9ea2baa20af22f2345044cd4c3a20b5b495acc13
--- /dev/null
+++ b/roles/ssh-password-login/tasks/main.yml
@@ -0,0 +1,20 @@
+- name: "Enable Challenge Response"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "ChallengeResponseAuthentication no"
+    line: "ChallengeResponseAuthentication yes"
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
+- name: "Enable Password Authentication"
+  lineinfile:
+  args:
+    dest: /etc/ssh/sshd_config
+    regexp: "PasswordAuthentication no"
+    line: "PasswordAuthentication yes"
+    backrefs: yes
+  sudo: true
+  notify: restart sshd
+
diff --git a/roles/strudel_config/tasks/main.yml b/roles/strudel_config/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b7bec4214ecaa420e962160d4c050170ba411894
--- /dev/null
+++ b/roles/strudel_config/tasks/main.yml
@@ -0,0 +1,8 @@
+- name: "Set login node"
+  set_fact:
+    loginNode: "{{ ansible_eth0.ipv4.address }}"
+
+- name: "Template Strudel config"
+  template: src=generic_slurm_config.json.j2 dest=/tmp/Strudel_Desktops.json
+  delegate_to: 127.0.0.1
+  run_once: True
diff --git a/roles/strudel_config/templates/generic_slurm_config.json.j2 b/roles/strudel_config/templates/generic_slurm_config.json.j2
new file mode 100644
index 0000000000000000000000000000000000000000..3acb443e96311ceec9d09bed0739a2be95c32884
--- /dev/null
+++ b/roles/strudel_config/templates/generic_slurm_config.json.j2
@@ -0,0 +1,452 @@
+[
+    [
+        "GenericDesktops"
+    ],
+    {
+        "GenericDesktops": {
+            "__class__": "siteConfig",
+            "__module__": "siteConfig",
+            "agent": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -l {username} {execHost} \"echo agent_hello; bash \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "agent_hello"
+                ],
+                "requireMatch": true
+            },
+            "authURL": null,
+            "authorizedKeysFile": null,
+            "dbusSessionBusAddress": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DISPLAY={vncDisplay};timeout 15 /usr/local/bin/cat_dbus_session_file.sh'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^DBUS_SESSION_BUS_ADDRESS=(?P<dbusSessionBusAddress>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "defaults": {
+                "jobParams_hours": 48,
+                "jobParams_mem": 4,
+                "jobParams_ppn": 1
+            },
+            "directConnect": true,
+            "displayStrings": {
+                "__class__": "sshKeyDistDisplayStrings",
+                "__module__": "siteConfig",
+                "createNewKeyDialogNewPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.",
+                "createNewKeyDialogNewPassphraseMismatch": "Passphrases don't match!",
+                "createNewKeyDialogNewPassphraseTooShort": "Passphrase is too short.",
+                "helpEmailAddress": "help@massive.org.au",
+                "networkError": "It looks like a network error has occurred. You may be able to resume your work by logging in again.",
+                "newPassphrase": "It looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key.\nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseEmptyForbidden": "Sorry, empty passphrases are forbidden.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key.\nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseMismatch": "Sorry, the two passphrases you entered don't match.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key.\nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "newPassphraseTitle": "Please enter a new passphrase",
+                "newPassphraseTooShort": "Sorry, the passphrase must be at least six characters.\nIt looks like this is the first time you're using the CVL on this\ncomputer. To use the CVL, the launcher will generate a local\npassphrase protected key on your computer which is used to\nauthenticate you and set up your remote CVL environment.\n\nPlease enter a new passphrase (twice to avoid typos) to protect your local key.\nAfter you've done this, your passphrase will be the primary method of\nauthentication for the launcher.\n\nWHY?\n\nThis new method of authentication allows you to create file system\nmounts to remote computer systems, and in the future it will support\nlaunching remote HPC jobs.",
+                "passphrasePrompt": "Please enter the passphrase for your SSH key",
+                "passphrasePromptIncorrect": "Sorry, that passphrase was incorrect.\nPlease enter the passphrase for your SSH key.\nIf you have forgotten the passphrase for your key, you may need to delete it and create a new key.\nYou can find this option under the Identity menu.\n",
+                "passphrasePromptIncorrectl": "Sorry, that passphrase was incorrect.
 Please enter the passphrase for your SSH key",
+                "passwdPrompt": "Please enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "passwdPromptIncorrect": "Sorry, that password was incorrect.\nPlease enter the password for your CVL account.\nThis is the password you entered when you requested an account\nat the website https://web.cvl.massive.org.au/users",
+                "persistentMessage": "Would you like to leave your current session running so that you can reconnect later?",
+                "persistentMessagePersist": "Leave it running",
+                "persistentMessageStop": "Stop the desktop",
+                "qdelQueuedJob": "It looks like you've been waiting for a job to start.\nDo you want me to delete the job or leave it in the queue so you can reconnect later?\n",
+                "qdelQueuedJobNOOP": "Leave it in the queue (I'll reconnect later)",
+                "qdelQueuedJobQdel": "Delete the job",
+                "reconnectMessage": "An existing desktop was found. Would you like to reconnect or kill it and start a new desktop?",
+                "reconnectMessageNo": "New desktop",
+                "reconnectMessageYes": "Reconnect",
+                "temporaryKey": "\nWould you like to use the launcher's old behaviour (entering a password every time you start a new desktop) or try the new behaviour (creating an SSH key pair and entering a passphrase the first time you use the launcher after reboot)?\n\nPasswords are recommended if this is a shared user account.\n\nSSH keys are recommended if you are the only person who uses this account.\n\nThis option can be changed from the Identity menu.\n",
+                "temporaryKeyNo": "Use my SSH Key",
+                "temporaryKeyYes": "Use my password every time"
+            },
+            "displayWebDavInfoDialogOnRemoteDesktop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'echo -e \\\"You can access your local home directory in Nautilus File Browser, using the location:\\n\\ndav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\n\\nYour one-time password is {vncPasswd}\\\" > ~/.vnc/\\$(hostname){vncDisplay}-webdav.txt'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "execHost": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<execHost>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "getProjects": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"groups | sed 's@ @\\n@g'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^\\s*(?P<group>\\S+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "imageid": null,
+            "instanceFlavour": null,
+            "listAll": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "(?P<jobid>(?P<jobidNumber>[0-9]+)) (?P<remainingWalltime>.*)$"
+                ],
+                "requireMatch": false
+            },
+            "loginHost": "{{ loginNode }}",
+            "messageRegexs": [
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^INFO:(?P<info>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^WARN:(?P<warn>.*(?:\n|\r\n?))"
+                },
+                {
+                    "__class__": "__regex__",
+                    "pattern": "^ERROR:(?P<error>.*(?:\n|\r\n?))"
+                }
+            ],
+            "onConnectScript": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'module load keyutility ; mountUtility.py'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "openWebDavShareInRemoteFileBrowser": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};/usr/bin/gconftool-2 --type=Boolean --set /apps/nautilus/preferences/always_use_location_entry true {ampersand}{ampersand} DISPLAY={vncDisplay} xdg-open dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "otp": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "'cat ~/.vnc/clearpass'",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^(?P<vncPasswd>\\S+)$"
+                ],
+                "requireMatch": true
+            },
+            "provision": null,
+            "relabel": {},
+            "runSanityCheck": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "running": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scontrol show job {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "JobState=RUNNING"
+                ],
+                "requireMatch": true
+            },
+            "setDisplayResolution": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "showStart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": null,
+                "failFatal": false,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "siteRanges": {
+                "jobParams_hours": [
+                    1,
+                    336
+                ],
+                "jobParams_mem": [
+                    1,
+                    1024
+                ],
+                "jobParams_nodes": [
+                    1,
+                    10
+                ],
+                "jobParams_ppn": [
+                    1,
+                    12
+                ]
+            },
+            "startServer": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; module load turbovnc ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\n/usr/local/bin/vncsession --vnc turbovnc --geometry {resolution} ; sleep 36000000 ' | sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^Submitted batch job (?P<jobid>(?P<jobidNumber>[0-9]+))$"
+                ],
+                "requireMatch": true
+            },
+            "stop": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "stopForRestart": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"scancel {jobidNumber}\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "tunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -L {localPortNumber}:localhost:{remotePortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "local",
+                "loop": false,
+                "regex": [
+                    "tunnel_hello"
+                ],
+                "requireMatch": true
+            },
+            "username": null,
+            "visibility": {
+                "advancedCheckBoxPanel": true,
+                "cipherPanel": "Advanced",
+                "debugCheckBoxPanel": "Advanced",
+                "jobParams_hours": true,
+                "jobParams_nodes": true,
+                "jobParams_ppn": true,
+                "label_hours": true,
+                "label_nodes": true,
+                "label_ppn": true,
+                "resolutionPanel": "Advanced",
+                "resourcePanel": "Advanced",
+                "usernamePanel": true
+            },
+            "vncDisplay": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"cat .vnc/slurm-{jobidNumber}.out\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^.*?started on display \\S+(?P<vncDisplay>:[0-9]+)\\s*$"
+                ],
+                "requireMatch": true
+            },
+            "webDavCloseWindow": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay}; wmctrl -F -i -c {webDavWindowID}'\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    null
+                ],
+                "requireMatch": false
+            },
+            "webDavIntermediatePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<intermediateWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavMount": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/bin/ssh {execHost} \\\"export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};echo \\\\\\\"import pexpect;child = pexpect.spawn('gvfs-mount dav://{localUsername}@localhost:{remoteWebDavPortNumber}/{homeDirectoryWebDavShareName}');child.expect('Password: ');child.sendline('{vncPasswd}');child.expect(pexpect.EOF);child.close();print 'gvfs-mount returned ' + str(child.exitstatus)\\\\\\\" {pipe} python\\\"\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "login",
+                "loop": false,
+                "regex": [
+                    "^gvfs-mount returned (?P<webDavMountingExitCode>.*)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavRemotePort": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": false,
+                "cmd": "\"/usr/local/bin/get_ephemeral_port.py\"",
+                "failFatal": true,
+                "formatFatal": false,
+                "host": "exec",
+                "loop": false,
+                "regex": [
+                    "^(?P<remoteWebDavPortNumber>[0-9]+)$"
+                ],
+                "requireMatch": true
+            },
+            "webDavTunnel": {
+                "__class__": "cmdRegEx",
+                "__module__": "siteConfig",
+                "async": true,
+                "cmd": "{sshBinary} -A -c {cipher} -t -t -oStrictHostKeyChecking=no -oExitOnForwardFailure=yes -R {remoteWebDavPortNumber}:localhost:{localWebDavPortNumber} -l {username} {execHost} \"echo tunnel_hello; bash\"",
+                "failFatal": true,
+                "formatFatal": false,
"local", + "loop": false, + "regex": [ + "tunnel_hello" + ], + "requireMatch": true + }, + "webDavUnmount": { + "__class__": "cmdRegEx", + "__module__": "siteConfig", + "async": false, + "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress};export DISPLAY={vncDisplay};timeout 1 gvfs-mount --unmount-scheme dav'\"", + "failFatal": true, + "formatFatal": false, + "host": "login", + "loop": false, + "regex": [ + null + ], + "requireMatch": false + }, + "webDavWindowID": { + "__class__": "cmdRegEx", + "__module__": "siteConfig", + "async": false, + "cmd": "\"/usr/bin/ssh {execHost} 'export DBUS_SESSION_BUS_ADDRESS={dbusSessionBusAddress}; DISPLAY={vncDisplay} xwininfo -root -tree'\"", + "failFatal": true, + "formatFatal": false, + "host": "login", + "loop": false, + "regex": [ + "^\\s+(?P<webDavWindowID>\\S+)\\s+\"{homeDirectoryWebDavShareName}.*Browser.*$" + ], + "requireMatch": true + } + } + } +] \ No newline at end of file diff --git a/roles/syncExports/tasks/addExports.yml b/roles/syncExports/tasks/addExports.yml index d3723e786ef615eb1224bfb4ce0b435ed74fdc1f..0ea7e7c9a5b0a399a0a4d64c0d8188e9e71f3844 100644 --- a/roles/syncExports/tasks/addExports.yml +++ b/roles/syncExports/tasks/addExports.yml @@ -4,4 +4,17 @@ delegate_to: "{{ nfs_server }}" run_once: true sudo: true - notify: "Reload exports" + +# Do not do this as a handler, instead do this here as a task so that it happens imediatly after the exports file is created before any clients +# attempt a mount +- name : "Reload exports" + command: exportfs -ra + delegate_to: "{{ nfs_server }}" + run_once: true + sudo: true + +- name : "Pause ... clients sometimes have errors" + command: sleep 60 + delegate_to: "{{ nfs_server }}" + run_once: true + sudo: true diff --git a/roles/vncserver/tasks/main.yml b/roles/vncserver/tasks/main.yml index b4df6889cdd83c26b50f59f044a849bc2e82c8da..1509b03ac37cf5dfc26d6148af16cf2094023bbd 100644 --- a/roles/vncserver/tasks/main.yml +++ b/roles/vncserver/tasks/main.yml @@ -1,5 +1,6 @@ --- -- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml" +#- include_vars: "{{ hostvars[ansible_hostname]['ansible_distribution'] }}_{{ ansible_architecture }}.yml" +- include_vars: "{{ ansible_distribution }}_{{ ansible_architecture }}.yml" - name: add repos apt shell: "add-apt-repository -y 'deb {{ item }} {{ ansible_distribution_release }} main' "