Commit 85ab9949 authored by Chris Hines

fix conflict, exactly the same changes were upstream

parents 8a0e913e 05a51ae3
---
- name: Install tmux
  apt: name=tmux state=latest
  sudo: true
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDOzj+PSIuuGMOkMK4UO01Y1d8+6jGYELM+msVDpeSCZCAYCqYzXay6QDl5IFpdlxlhWXzcsfbC8WcHy3z+jW6kP6BcqZd7+eYrZVcWeO9A+p67OcsOHw5ixhCjFlXKxX/3D4JeppQeIUswI33zw90QViAlOPsTQuvIIuNNKQhUvfTVvkljduFXNT3xjLWai+isYKWaCbfmaiQ7EQIQyX9a3RrBKcEsLbghk3UkSq/j1OlMTbIuBKfPu26slPNRQFVBjJJfkx+kFF9ArgywHDN5dX3CxGOJhC2KIBemOC5cXjUbUI15a1UReDqShhb0m4p9pTkFOexGOB17lh1/4nUuYt2xzRahNyAEz9i02eIaVkhYFjVn1OuKJ7pa44YwoGx8RmFjRp8W/i3Crbp/IqBzMCfOZmub98b0I7H9ryg+taACRga6sLqWTDrEAbj7zFmRaaOHDIvrFj5ITO4YKYwSaWKL8w19NX4VJqzO3VVHbmUxFBoK4tGDAQ39w6BfRdxdKb+FIe+MOz68k4ADKHJSf9+LCQOFEikKNkKVUNh7FjLwi5Wz7K4S5wjnrjTUiqNC5imst262UJjtTeg7wE7ngPOlpSi1Mh4pV3/tcAboiRF8ABS/P8P0chln1YbA73x45ZF/Is9XQ2XUJiUwutrcY+upRdu2p9JAeKxGrt8i7w== root@autht
---
- name: restart ssh debian
  service: name=ssh state=restarted
  sudo: true
  when: ansible_os_family == "Debian"
---
- name: copy ca cert
  copy: src=server_ca.pub dest=/etc/ssh/server_ca.pub owner=root group=root mode=644
  sudo: true
- name: edit sshd_config
  lineinfile:
  args:
    dest: /etc/ssh/sshd_config
    line: TrustedUserCAKeys /etc/ssh/server_ca.pub
    state: present
  sudo: true
  notify: restart ssh debian
  when: ansible_os_family == "Debian"
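The tasks above install an SSH certificate authority: TrustedUserCAKeys tells sshd to accept any user key signed by server_ca, so per-user authorized_keys entries are no longer needed. A minimal sketch of the signing side (not part of this commit; the key path, identity, and principal are illustrative):

# Sign a user's public key with the CA private key "server_ca",
# valid for the principal (login name) "alice" for 52 weeks.
ssh-keygen -s server_ca -I alice@example -n alice -V +52w ~/.ssh/id_rsa.pub
# sshd on the managed hosts then accepts the resulting
# ~/.ssh/id_rsa-cert.pub for user alice via TrustedUserCAKeys.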
@@ -14,4 +14,14 @@
   args:
     dest: /usr/share/Modules/init/.modulespath
     line: /usr/local/Modules/modulefiles
+  ignore_errors: true
+  sudo: true
+
+# for some reason ubuntu uses lowercase modules
+- name: add /usr/local/Modules to the module file path
+  lineinfile:
+  args:
+    dest: /usr/share/modules/init/.modulespath
+    line: /usr/local/Modules/modulefiles
+  ignore_errors: true
   sudo: true
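The task is duplicated because Debian-based systems ship environment-modules under the lowercase /usr/share/modules path while RHEL-style systems use /usr/share/Modules; ignore_errors lets whichever path is absent fail harmlessly. An illustrative check (not part of this commit):

# See which init file the environment-modules package actually installed,
# then confirm the extra directory is visible to the module command.
ls /usr/share/Modules/init/.modulespath /usr/share/modules/init/.modulespath 2>/dev/null
module use /usr/local/Modules/modulefiles   # per-shell equivalent of the edit above
module avail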
@@ -4,5 +4,5 @@
   sudo: true
 - name: provision cron job
-  cron: name=provision job={{ provision }} user=root minute=*/5 state=present
+  cron: name=provision job="/usr/bin/flock -x -n /tmp/provision.lck -c {{ provision }}" user=root minute=*/30 state=present
   sudo: true
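Wrapping the cron job in flock means a provisioning run that outlasts the 30-minute interval cannot overlap the next one: -x takes an exclusive lock on /tmp/provision.lck and -n makes a second invocation give up immediately instead of queueing. A quick illustration of those semantics (hypothetical commands):

# The second flock exits non-zero at once because the first still holds the lock.
/usr/bin/flock -x -n /tmp/provision.lck -c "sleep 60" &
/usr/bin/flock -x -n /tmp/provision.lck -c "echo never runs" || echo "lock busy"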
@@ -3,6 +3,8 @@
 HOME_DIR={{ home_dir }}
 user_list=($(getent passwd | cut -d ":" -f1))
 log_file="/root/slurm.log"
+export PATH=$PATH:{{ slurm_dir }}/bin
+sacctmgr=$( which sacctmgr )
 for user in ${user_list[*]}; do
 {% if project_check is defined %}
@@ -24,15 +26,15 @@ for user in ${user_list[*]}; do
 find=$(sacctmgr list cluster ${cluster} | grep ${cluster})
 if [ -z "${find}" ]; then
-su slurm -c "sacctmgr -i add cluster ${cluster}" || { echo "error to create cluster ${cluster}" >> ${log_file} && exit 1; }
+su slurm -c "$sacctmgr -i add cluster ${cluster}" || { echo "error to create cluster ${cluster}" >> ${log_file} && exit 1; }
 fi
 find=$(sacctmgr list account ${account} | grep ${account})
 if [ -z "${find}" ]; then
-su slurm -c "sacctmgr -i add account ${account} Description=CVL Organization=monash cluster=${cluster}" || { echo "error to create account ${account}" >> ${log_file} && exit 1; }
+su slurm -c "$sacctmgr -i add account ${account} Description=CVL Organization=monash cluster=${cluster}" || { echo "error to create account ${account}" >> ${log_file} && exit 1; }
 fi
 find=$(sacctmgr list user ${user} | grep ${user})
 if [ -z "${find}" ]; then
-su slurm -c "sacctmgr -i add user ${user} account=${account} cluster=${cluster}" || { echo "error to create user ${user}" >> ${log_file} && exit 1; }
+su slurm -c "$sacctmgr -i add user ${user} account=${account} cluster=${cluster}" || { echo "error to create user ${user}" >> ${log_file} && exit 1; }
 fi
 fi
 fi
...
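This change matters because su slurm -c starts a fresh shell whose PATH may not include {{ slurm_dir }}/bin; resolving sacctmgr once via which (after extending PATH) and expanding $sacctmgr inside the quoted command hands the slurm user an absolute path. The idempotency pattern the script relies on, sketched with hypothetical names (cluster "m3", account "cvl", user "alice"):

# "list | grep" probes for an existing record; -i answers yes to
# sacctmgr's confirmation prompt so the script never blocks.
sacctmgr=$( which sacctmgr )
if [ -z "$( $sacctmgr list user alice | grep alice )" ]; then
    su slurm -c "$sacctmgr -i add user alice account=cvl cluster=m3"
fi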
 ---
+slurm_provision: /root/slurm_provision.sh
@@ -11,6 +11,7 @@
     - cgmanager
     - cgmanager-utils
     - libcgmanager0
+  sudo: true
   when: ansible_os_family == "Debian"
 - name: config cgroup.conf file
...
@@ -21,6 +21,10 @@
   args:
     chdir: /tmp/warewulf-nhc-{{ nhc_version }}
+- name: ensure sysconfig dir exists
+  file: dest=/etc/sysconfig state=directory owner=root group=root mode=755
+  sudo: true
 - name: copy nhc sysconfig script
   template: dest=/etc/sysconfig/nhc src=nhc.sysconfig.j2 mode=644
   sudo: true
@@ -39,7 +43,7 @@
   register: generate_nhc_config_file
 - name: generate config file
-  shell: "{{ nhc_dir }}/sbin/nhc-genconf"
+  shell: "{{ nhc_dir }}/sbin/nhc-genconf -d -c {{ nhc_dir }}/etc/nhc/{{ nhc_config_file }} CONFDIR={{ nhc_dir }}/etc/nhc"
   sudo: true
   when: generate_nhc_config_file
...
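nhc-genconf probes the node's hardware and writes a baseline health-check configuration; the added arguments pin that output to the NHC install tree rather than the /etc defaults. A sketch of the expanded command, with {{ nhc_dir }} assumed to be /opt/nhc and {{ nhc_config_file }} assumed to be node.conf:

# -d enables debug output, -c names the config file to generate, and the
# CONFDIR variable points NHC at its configuration directory.
/opt/nhc/sbin/nhc-genconf -d -c /opt/nhc/etc/nhc/node.conf CONFDIR=/opt/nhc/etc/nhc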
@@ -134,7 +134,7 @@
   template: src=slurm.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
   sudo: true
   notify: restart slurm
   when: slurm_use_vpn==False
 - name: install slurm.conf
   template: src=slurm-vpn.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
...
@@ -91,7 +91,7 @@
 "__class__": "cmdRegEx",
 "__module__": "siteConfig",
 "async": false,
-"cmd": "\"squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
+"cmd": "\"{{ slurm_dir }}/bin/squeue -j {jobidNumber} -o \"%N\" | tail -n -1 | cut -f 1 -d ',' | xargs -iname getent hosts name | cut -f 1 -d ' ' \"",
 "failFatal": true,
 "formatFatal": false,
 "host": "login",
@@ -121,7 +121,7 @@
 "__class__": "cmdRegEx",
 "__module__": "siteConfig",
 "async": false,
-"cmd": "squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
+"cmd": "{{ slurm_dir }}/bin/squeue -u {username} -o \\\"%i %L\\\" | tail -n -1",
 "failFatal": true,
 "formatFatal": false,
 "host": "login",
@@ -208,7 +208,7 @@
 "__class__": "cmdRegEx",
 "__module__": "siteConfig",
 "async": false,
-"cmd": "\"scontrol show job {jobidNumber}\"",
+"cmd": "\"{{ slurm_dir }}/bin/scontrol show job {jobidNumber}\"",
 "failFatal": true,
 "formatFatal": false,
 "host": "login",
@@ -268,7 +268,7 @@
 "__class__": "cmdRegEx",
 "__module__": "siteConfig",
 "async": false,
-"cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\nvncserver ; sleep 36000000 ' | sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
+"cmd": "\"mkdir ~/.vnc ; rm -f ~/.vnc/clearpass ; touch ~/.vnc/clearpass ; chmod 600 ~/.vnc/clearpass ; passwd=\"'$'\"( dd if=/dev/urandom bs=1 count=8 2>/dev/null | md5sum | cut -b 1-8 ) ; echo \"'$'\"passwd > ~/.vnc/clearpass ; cat ~/.vnc/clearpass | vncpasswd -f > ~/.vnc/passwd ; chmod 600 ~/.vnc/passwd ; echo -e '#!/bin/bash\\nvncserver ; sleep 36000000 ' | {{slurm_dir}}/bin/sbatch -p batch -N {nodes} -n {ppn} --time={hours}:00:00 -J desktop_{username} -o .vnc/slurm-%j.out \"",
 "failFatal": true,
 "formatFatal": false,
 "host": "login",
@@ -282,7 +282,7 @@
 "__class__": "cmdRegEx",
 "__module__": "siteConfig",
 "async": false,
-"cmd": "\"scancel {jobidNumber}\"",
+"cmd": "\"{{ slurm_dir }}/bin/scancel {jobidNumber}\"",
 "failFatal": true,
 "formatFatal": false,
 "host": "login",
...
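Each siteConfig cmd string now invokes the Slurm client tools by absolute path, since these commands run over non-interactive SSH sessions whose PATH need not include {{ slurm_dir }}/bin. For illustration, the node-lookup pipeline from the first hunk, unpacked with a hypothetical job 12345 on node01 and {{ slurm_dir }} assumed to be /opt/slurm:

/opt/slurm/bin/squeue -j 12345 -o "%N" |  # print a header plus the job's node list
    tail -n -1 |                          # keep the last line, e.g. "node01,node02"
    cut -f 1 -d ',' |                     # keep the first node, "node01"
    xargs -iname getent hosts name |      # resolve it, e.g. "10.0.0.5 node01"
    cut -f 1 -d ' '                       # keep just the IP address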