Skip to content
Snippets Groups Projects
Commit 5df8f84d authored by Jupiter Hu
Browse files

fix slurm gres and upgrade slurm version, update m2cvl 12 May 2015

parent 57b9a264
No related branches found
No related tags found
No related merge requests found
......@@ -4,7 +4,7 @@
sudo: true
- name: "Mounting NFS mounts"
mount: name={{ item.name }} src=" {{ item.ipv4 }}:{{ item.src }} " fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
mount: name={{ item.name }} src="{{ item.ipv4 }}:{{ item.src }} " fstype={{ item.fstype }} opts={{ item.opts }} state=mounted
with_items: nfsMounts
notify: "restart rpcbind"
notify: "restart idmap"
......
- name: get slurm
shell: wget http://www.schedmd.com/download/archive/slurm-{{ slurm_version }}.tar.bz2
shell: wget https://cvl.massive.org.au/slurm-{{ slurm_version }}.tar.bz2
# shell: wget http://www.schedmd.com/download/archive/slurm-{{ slurm_version }}.tar.bz2
args:
chdir: /tmp
creates: /tmp/slurm-{{ slurm_version }}.tar.bz2
......
......@@ -43,6 +43,10 @@
sudo: true
when: slurmlogdir is defined
- name: create greps directory
file: path={{ slurm_dir }}/etc/gres state=directory owner=slurm group=slurm mode=755
sudo: true
- name: install deps
yum: name={{ item }} state=latest
with_items:
......@@ -107,11 +111,27 @@
- include: installSlurmFromSource.yml
- name: check slurm generic resource
shell: "{{ slurm_gres_check }}"
register: slurm_generic_resource
ignore_errors: true
when: slurm_gres_check is defined
- name: install gres config file
template: src=gres.conf.j2 dest={{ slurm_dir }}/etc/gres.conf mode=644
sudo: true
when: slurm_generic_resource.stdout
- name: install gres sub config file
template: src=gres_sub.conf.j2 dest={{ slurm_dir }}/etc/gres/gres.conf mode=644
sudo: true
when: slurm_gres_list is defined
- name: install slurm.conf
template: src=slurm.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
sudo: true
notify: restart slurm
when: slurm_use_vpn==False
when: slurm_use_vpn==False and slurm_gres_list is defined
- name: install slurm.conf
template: src=slurm-vpn.conf.j2 dest={{ slurm_dir }}/etc/slurm.conf
......
......@@ -141,7 +141,7 @@ MpiParams=ports=12000-12999
{% endfor %}
{% endfor %}
{% for node in nodelist|unique %}
NodeName={{ node }} Procs={{ hostvars[node]['ansible_processor_vcpus'] }} RealMemory={{ hostvars[node].ansible_memory_mb.real.total }} Sockets={{ hostvars[node]['ansible_processor_vcpus'] }} CoresPerSocket=1 ThreadsPerCore={{ hostvars[node].ansible_processor_threads_per_core }} {% if hostvars[node].ansible_hostname.find('vis') != -1 %}Gres=gpu{% if hostvars[node].ansible_hostname.find('k1') > 0 %}:k1m{% endif %}{% if hostvars[node].ansible_hostname.find('k2') > 0 %}:k2m{% endif %}:1{% endif %} {% if hostvars[node]['ansible_processor_vcpus'] == 1 %}Weight=1{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 1 and hostvars[node]['ansible_processor_vcpus'] <= 16 %}Weight=3{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 16 and hostvars[node]['ansible_processor_vcpus'] <= 20 %}Weight=5{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 20 and hostvars[node]['ansible_processor_vcpus'] <= 40 %}Weight=7{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 40 and hostvars[node]['ansible_processor_vcpus'] <= 64 %}Weight=8{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 64 and hostvars[node]['ansible_processor_vcpus'] <= 128 %}Weight=9{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 128 %}Weight=10{% endif %} Feature=stage1 State=UNKNOWN
NodeName={{ node }} Procs={{ hostvars[node]['ansible_processor_vcpus'] }} RealMemory={{ hostvars[node].ansible_memory_mb.real.total }} Sockets={{ hostvars[node]['ansible_processor_vcpus'] }} CoresPerSocket=1 ThreadsPerCore={{ hostvars[node].ansible_processor_threads_per_core }} {% if hostvars[node].ansible_hostname.find('vis') != -1 %}Gres=gpu{% if hostvars[node].ansible_hostname.find('k1') > 0 %}:k1{% endif %}{% if hostvars[node].ansible_hostname.find('k2') > 0 %}:k2{% endif %}:1{% endif %} {% if hostvars[node]['ansible_processor_vcpus'] == 1 %}Weight=1{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 1 and hostvars[node]['ansible_processor_vcpus'] <= 16 %}Weight=3{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 16 and hostvars[node]['ansible_processor_vcpus'] <= 20 %}Weight=5{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 20 and hostvars[node]['ansible_processor_vcpus'] <= 40 %}Weight=7{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 40 and hostvars[node]['ansible_processor_vcpus'] <= 64 %}Weight=8{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 64 and hostvars[node]['ansible_processor_vcpus'] <= 128 %}Weight=9{% endif %}{% if hostvars[node]['ansible_processor_vcpus'] > 128 %}Weight=10{% endif %} Feature=stage1 State=UNKNOWN
{% endfor %}
{% for queue in slurmqueues %}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment