Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project options: hpc-team/HPCasCode, chines/ansible_cluster_in_a_box

Showing changes with 2421 additions and 33 deletions
- name: "restart sshdx11"
service: name=sshd state=restarted
become: true
when: ansible_os_family == "RedHat"
- name: "restart sshx11"
service: name=ssh state=restarted
become: true
when: ansible_os_family == "Debian"
- name: "Enable x11 forwarding"
lineinfile:
args:
dest: /etc/ssh/sshd_config
regexp: "X11Forwarding no"
line: "X11Forwarding yes"
backrefs: yes
become: true
notify:
- restart sshdx11
- restart sshx11
- name: "uncomment x11displayoffset"
lineinfile:
args:
dest: /etc/ssh/sshd_config
regexp: "#X11DisplayOffset 10"
line: "X11DisplayOffset 10"
backrefs: yes
become: true
notify:
- restart sshdx11
- restart sshx11
- name: "set x11uselocalhost no"
lineinfile:
args:
dest: /etc/ssh/sshd_config
regexp: "#X11UseLocalhost yes"
line: "X11UseLocalhost no"
backrefs: yes
become: true
notify:
- restart sshdx11
- restart sshx11
#!/usr/bin/python3
# The purpose of this script is to enrich every VM of an ansible inventory file
# in json format with the available hypervisor mapping found in /projects/pMOSP/hypervisor/
import json, socket, sys

hypervisormapping = open('m3-latest', 'r')  # coming from /projects/pMOSP/hypervisor/m3-latest
# for monarch see /projects/pMOSP/hypervisor/monarch-vm-hw-mapping-2020
mapping = hypervisormapping.readlines()
mapping = mapping[3:-1]

with open('m3inventory.json') as json_file:  # this file was created via m3inventory > m3inventory.json
    inv = json.load(json_file)

for row in mapping:
    vm = row.split('|')[2].strip()
    hyp = row.split('|')[4].strip()
    if vm not in inv['_meta']['hostvars'].keys():
        sys.stderr.write("Not found in inventory: {}\n".format(vm))
        continue
    inv['_meta']['hostvars'][vm]['hypervisor_ip'] = socket.gethostbyname(hyp + '-1g.erc.monash.edu')

print("#!/bin/bash\necho '" + json.dumps(inv, indent=4, sort_keys=True) + "'")
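# Usage sketch (assumed filenames, not part of the original script): the emitted text is itself a
# small shell script that echoes the enriched JSON, so it can be dropped in as a static "dynamic
# inventory" for ansible, e.g.:
#   ./m3inventory > m3inventory.json
#   ./enrich_inventory.py > m3inventory_enriched.sh && chmod +x m3inventory_enriched.sh
#   ansible-playbook -i m3inventory_enriched.sh site.yml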
import pandas as pd
import zipfile
import json
import os, sys, yaml


def task_result(task):
    # 0 is skipped (because of a conditional)
    # 1 is not changed (everything should be 1)
    # 2 is changed (you should run ansible playbook to fix the cluster)
    # 3 is skipped due to check mode (you should not write a role like this)
    # 4 is failed. You've got something to fix.
    if "skipped" in task and task["skipped"] is True:
        if "skipped_reason" in task and task["skipped_reason"] == "Conditional result was False":
            return 0
        if "msg" in task and task["msg"] == "skipped, running in check mode":
            return 3
    if "failed" in task and task["failed"]:
        return 4
    if "changed" in task:
        if task["changed"]:
            return 2
        else:
            return 1


def change_value(change):
    if change == False:
        return 1
    if change == True:
        return 2
    return 0


def change_str(change):
    if change == 0:
        return "N/A"
    if change == 1:
        return "False"
    if change == 2:
        return "True"
    if change == 3:
        return "Skipped in check mode"
    if change == 4:
        return "Failed"


def get_changes(data):
    for play in data['plays']:
        for task in play['tasks']:
            for host, hosttask in task['hosts'].items():
                #yield {'task':task['task']['name'],'host':host,'change':change_value(hosttask['changed']),'changestr':hosttask['changed'],'taskid':task['task']['id']}
                yield {'task': task['task']['name'], 'host': host,
                       'change': task_result(hosttask), 'changestr': change_str(task_result(hosttask)),
                       'taskid': task['task']['id']}
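# Illustrative examples (based on the keys the ansible json callback emits for each host/task entry):
#   {"changed": false}                                                  -> task_result == 1
#   {"changed": true}                                                   -> task_result == 2
#   {"skipped": true, "skipped_reason": "Conditional result was False"} -> task_result == 0
#   {"skipped": true, "msg": "skipped, running in check mode"}          -> task_result == 3
#   {"failed": true}                                                    -> task_result == 4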
#def change_value(change):
# if change == False:
# return 1
# if change == True:
# return 2
# return 0
#
#def change_str(change):
# if change == 0:
# return "N/A"
# if change == 1:
# return "False"
# if change == 2:
# return "True"
#
#def get_changes(data):
# for play in data['plays']:
# for task in play['tasks']:
# for host,hosttask in task['hosts'].items():
# yield {'task':task['task']['name'],'host':host,'change':change_value(hosttask['changed']),'changestr':hosttask['changed'],'taskid':task['task']['id']}
def load_data(artifactfile="artifacts.zip", nodeclass="compute_ansible_check.log"):
    #with zipfile.ZipFile(artifactfile,'r') as zipf:
    #    data = json.loads(zipf.read(nodeclass))
    with open(nodeclass) as f:
        data = json.loads(f.read())
    # Create my dataframe from a list of dictionaries
    df = pd.DataFrame(list(get_changes(data)))
    # Extract a mapping from the taskid to the task name
    taskmap = df[['task', 'taskid']].copy().drop_duplicates().set_index('taskid')
    # Reindex the list of values, so that each change event can be referenced by a unique combination of host and taskid
    midx = pd.MultiIndex.from_frame(df[['host', 'taskid']])
    df = df.set_index(midx)
    # Assume that every host executes every task. Use fillna to fill in tasks which hosts don't execute
    #print(df)
    #print(df.unstack('taskid'))
    df = df.fillna(0)
    #df=df.unstack('taskid').fillna(0).stack()
    # Since our dataframe is now bigger (has those pesky NaNs filled in with zeros) create a new list of hosts and tasks.
    hosts = df.index.get_level_values(0)
    df['host'] = hosts
    tasks = list(map(lambda x: taskmap.loc[x]['task'], df.index.get_level_values(1)))
    df['task'] = tasks
    changestr = list(map(lambda x: change_str(x), df['change']))
    df['changestr'] = changestr
    df['taskid'] = df.index.get_level_values(1)
    return df
def bokeh_plot(df, title):
    # Create a series of colour bars (i.e. a HeatMap) from a list
    # The list should include columns for task, host, change and changestr
    # (the value of change sets the colour but the value of changestr is shown in the tool tip)
    from bokeh.io import output_file, show
    from bokeh.layouts import column
    from bokeh.plotting import figure, save
    from bokeh.models import LinearColorMapper, BasicTicker, PrintfTickFormatter, ColorBar, Div
    from math import pi

    # this is an abbreviated colormap from a bokeh example
    colors = ['#084594', '#2171b5', '#4292c6', "#dfccce", "#550b1d"]
    #colors = [ "#e2e2e2", "#dfccce", "#550b1d"]
    mapper = LinearColorMapper(palette=colors, low=0, high=4)
    #mapper = LinearColorMapper(palette=colors, low=0, high=2)

    TOOLS = "hover,save,pan,box_zoom,reset,wheel_zoom"
    dataxrange = list(df.index.get_level_values(1).unique())
    datayrange = list(df.index.get_level_values(0).unique())
    p = figure(title=title,
               x_range=dataxrange, y_range=datayrange,
               x_axis_location="above",
               sizing_mode='stretch_width',
               tools=TOOLS, toolbar_location='below',
               tooltips=[('host', '@host'), ('task', '@task'), ('changed', '@changestr'), ('taskid', '@taskid')])

    p.grid.grid_line_color = None
    p.axis.axis_line_color = None
    p.xaxis.major_tick_line_color = None        # turn off x-axis major ticks
    p.xaxis.minor_tick_line_color = None        # turn off x-axis minor ticks
    p.xaxis.major_label_text_color = None       # turn off x-axis tick labels leaving space
    p.xaxis.major_label_text_font_size = '0pt'  # turn off x-axis tick labels
    p.yaxis.major_tick_line_color = None        # turn off y-axis major ticks
    p.yaxis.minor_tick_line_color = None        # turn off y-axis minor ticks
    p.yaxis.major_label_text_color = None       # turn off y-axis tick labels leaving space
    p.yaxis.major_label_text_font_size = '0pt'  # turn off y-axis tick labels
    #p.axis.major_tick_line_color = None
    #p.axis.major_label_text_font_size = "5pt"
    #p.axis.major_label_standoff = 0
    #p.xaxis.major_label_orientation = pi / 3

    p.rect(x="taskid", y="host", width=1, height=1,
           source=df,
           fill_color={'field': 'change', 'transform': mapper},
           line_color=None)
    save(p)
    return p
import logging
from slack_logger import SlackHandler, SlackFormatter

slack_hook = os.environ['SLACK_HOOK']
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('%(asctime)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
sh = SlackHandler(username='m3-ansible-check', icon_emoji=':robot_face:', url=slack_hook)
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)

import bokeh.io
from datetime import datetime
strBokehfile = "output.html"
#datetime.today().strftime('%Y%m%d')+'.html'
bokeh.io.output_file(strBokehfile)
#from bokeh.io import curdoc
from bokeh.models import Div
from bokeh.layouts import layout, column

# Load the compute node results first so the optional outputChangedNodeList mode has data to report on
df = load_data(nodeclass="compute_ansible_check.log")
if (len(sys.argv) > 1 and 'outputChangedNodeList' in sys.argv):
    print(yaml.dump(list(df[df.change == 2].host.unique())))
    sys.exit(0)

if (len(sys.argv) > 1 and 'bokehplot' in sys.argv):
    cmpplot = bokeh_plot(df, "Compute Nodes")
    cmd = "mv " + strBokehfile + " comp_" + strBokehfile
    os.system(cmd)
    #cmd='swift upload ansiblechecker comp_'+strBokehfile
    #os.system(cmd)
nodes = len(df.host.unique())
changed = len(df[df.change == 2].host.unique())
failed = len(df[df.change == 4].host.unique())
logger.info("{} Compute nodes, {} had at least one change, {} had at least one failed task".format(nodes, changed, failed))

df = load_data(nodeclass="login_ansible_check.log")
if (len(sys.argv) > 1 and 'bokehplot' in sys.argv):
    cmpplot = bokeh_plot(df, "Login Nodes")
    cmd = "mv " + strBokehfile + " login_" + strBokehfile
    os.system(cmd)
    #cmd='swift upload ansiblechecker login_'+strBokehfile
    #os.system(cmd)
nodes = len(df.host.unique())
changed = len(df[df.change == 2].host.unique())
failed = len(df[df.change == 4].host.unique())
logger.info("{} Login nodes, {} had at least one change, {} had at least one failed task".format(nodes, changed, failed))

df = load_data(nodeclass="mgmt_ansible_check.log")
if (len(sys.argv) > 1 and 'bokehplot' in sys.argv):
    cmpplot = bokeh_plot(df, "Management Nodes")
    cmd = "mv " + strBokehfile + " mgmt_" + strBokehfile
    os.system(cmd)
    #cmd='swift upload ansiblechecker mgmt_'+strBokehfile
    #os.system(cmd)
nodes = len(df.host.unique())
changed = len(df[df.change == 2].host.unique())
failed = len(df[df.change == 4].host.unique())
logger.info("{} Management nodes, {} had at least one change, {} had at least one failed task".format(nodes, changed, failed))

df = load_data(nodeclass="dgx_ansible_check.log")
if (len(sys.argv) > 1 and 'bokehplot' in sys.argv):
    cmpplot = bokeh_plot(df, "DGX Nodes")
    cmd = "mv " + strBokehfile + " dgx_" + strBokehfile
    os.system(cmd)
    #cmd='swift upload ansiblechecker dgx_'+strBokehfile
    #os.system(cmd)
nodes = len(df.host.unique())
changed = len(df[df.change == 2].host.unique())
failed = len(df[df.change == 4].host.unique())
logger.info("{} DGX nodes, {} had at least one change, {} had at least one failed task".format(nodes, changed, failed))

logger.info("this is defined in .gitlab-ci.yml in ansible_check and the trigger is configured in https://gitlab.erc.monash.edu.au/hpc-team/clusterbuild/pipeline_schedules ")
#logger.info("https://swift.rc.nectar.org.au/v1/AUTH_e86c925319094fb2b8cc1bf2373c69dc/ansiblechecker/"+strBokehfile)
artifact_url = "https://gitlab.erc.monash.edu.au/hpc-team/clusterbuild/-/jobs/" + os.environ['CI_JOB_ID'] + "/artifacts/browse"
logger.info(artifact_url)
#if (len(sys.argv)>1 and 'bokehplot' in sys.argv):
#    cmpplot = bokeh_plot(df, "Compute Nodes")
#    cmd='swift upload ansiblechecker '+strBokehfile
#    os.system(cmd)
#!/bin/bash
hpcca_public_key='cert-authority ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCfHlWGrnpirvqvUTySnoQK6ze5oIXz7cYIT+XCBeBCahlK05O38g0erBGrNWFozZwbIXnysVCibaUJqtH0JrYqmcr2NnYA0PoiTeranvaJI7pQsga1gBxfK/D4UItw5yI6V7w9efMT0zpIP8WEubQz6GFtkyiNVgFCHj3+VhLs3RslvYzb35SFcLXEDsGVQM5NdWBUgRaNRqpTPvuMcxTyPvy32wW72kwaYRQioDJFcE2WJ240M2oSsx+dhTWvI8sW1sEUI1qIDfyBPsOgsLofuSpt4ZNgJqBUTp/hW85wVpNzud6A4YJWHpZXSDMtUMYE9QL+x2fw/b26yck9ZPE/ hines@tun'
mkdir -p /home/ec2-user/.ssh
echo $hpcca_public_key >> /home/ec2-user/.ssh/authorized_keys
#!/bin/bash
card_number=1
port_number=1
rm -rf /etc/udev/rules.d/70-persistent-net.rules
rm -rf /etc/udev/rules.d/98-persistent-net.rules
rm -rf /etc/udev/rules.d/99-dhcp-all-interfaces.rules
for dev in $(ip link show | grep mtu | awk -F":" '{print $2}' | xargs); do
    # Check if it is a physical device?
    if [[ -e /sys/class/net/$dev/device/resource ]] && \
       [[ -e /sys/class/net/$dev/speed ]]; then
        # For TP NIC
        ip link set $dev up && dev_speed=$(cat /sys/class/net/$dev/speed) || continue
        dev_speed=$(cat /sys/class/net/$dev/speed)
        if [[ $dev_speed -le 10000 ]] && \
           [[ "$(cat /sys/class/net/$dev/operstate)" == "up" ]]; then
            hwaddress=$(ethtool -P $dev | awk '{print $3}')
            echo -e SUBSYSTEM==\"net\", ACTION==\"add\", DRIVERS==\"?*\", ATTR{address}==\"$hwaddress\", NAME=\"e1p1\" >> /etc/udev/rules.d/98-persistent-net.rules
        # For HIGH SPEED NIC
        elif [[ $dev_speed -ge 10000 ]] && \
             [[ "$(cat /sys/class/net/$dev/device/vendor)" == "0x15b3" ]] && \
             [[ -e /sys/class/net/$dev/device/sriov_numvfs ]]; then
            #echo $dev $card_number $port_number
            if [[ $port_number -gt 2 ]]; then
                card_number=2
                port_number=1
            fi
            hwaddress=$(ethtool -P $dev | awk '{print $3}')
            if [[ "$dev" != "p${card_number}p${port_number}" ]]; then
                if [[ "$dev" == "p1p2" ]] || [[ "p${card_number}p${port_number}" == "p1p2" ]]; then
                    echo 0 > /sys/class/net/$dev/device/sriov_numvfs
                fi
                ip link set $dev down
                ip link set $dev name "p${card_number}p${port_number}"
                dev="p${card_number}p${port_number}"
                ip link set $dev up
            fi
            echo -e SUBSYSTEM==\"net\", ACTION==\"add\", DRIVERS==\"?*\", ATTR{address}==\"$hwaddress\", NAME=\"p${card_number}p${port_number}\" >> /etc/udev/rules.d/98-persistent-net.rules
            port_number=$(( port_number + 1 ))
        fi
    fi
done
#!/usr/bin/python
import random
import sys
import string


def get_passwd(f, passname):
    f.seek(0)
    for line in f.readlines():
        (key, passwd) = line.split(':')
        if key == passname:
            f.close()
            return passwd.rstrip()
    return None


def mk_passwd(f, passname):
    passwd = ''.join(random.choice(string.ascii_uppercase + string.digits + string.ascii_lowercase) for _ in range(16))
    f.write("%s:%s\n" % (passname, passwd))
    return passwd


try:
    f = open('../passwd.txt', 'at+')
except:
    f = open('./passwd.txt', 'at+')

passname = sys.argv[1]
passwd = get_passwd(f, passname)
if passwd is None:
    passwd = mk_passwd(f, passname)
print passwd
f.close()
#!/usr/bin/env python
import sys, os, string, socket, re
import shlex, multiprocessing, time, shutil, json
from novaclient import client as nvclient
from cinderclient import client as cdclient
import novaclient.exceptions as nvexceptions
from keystoneclient import client as ksclient
from joblib import Parallel, delayed
from multiprocessing import Process, Manager, Pool
import yaml


def gatherInfo(md_key, md_value, authDict, project_id, inventory):
    ## Fetch the Nova Object
    from keystoneclient import client as ksclient
    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    auth = v3.Password(project_id=project_id, **authDict)
    sess = session.Session(auth=auth)
    nc = nvclient.Client('2.0', session=sess)
    cc = cdclient.Client('3.0', session=sess)
    for server in nc.servers.list():
        if server.metadata and \
           'ansible_host_groups' in server.metadata and \
           md_key in server.metadata:
            if server.metadata[md_key].strip() != md_value.strip():
                continue
            unwantedChars = """][")("""
            rgx = re.compile('[%s]' % unwantedChars)
            ansible_groups = rgx.sub('', server.metadata['ansible_host_groups']).split(',')
            hostname = server.name
            novaVolumes = cc.volumes.list(server.id)
            # Set Ansible Host Group
            for group in ansible_groups:
                groupName = group.strip()
                if groupName not in inventory:
                    inventory[groupName] = []
                inventory[groupName].append(hostname)
            # Add other metadata
            if not hostname in inventory['_meta']['hostvars']:
                inventory['_meta']['hostvars'][hostname] = {}
            for md in server.metadata.items():
                if md[0] not in (md_key, 'ansible_host_groups'):
                    inventory['_meta']['hostvars'][hostname].update({md[0]: md[1]})
            if novaVolumes:
                volDict = {}
                for volume in novaVolumes:
                    try:
                        if volume.attachments[0]['server_id'] == server.id:
                            volDict[volume.name] = {'dev': '/dev/disk/by-id/virtio-' + volume.id[:20], 'uuid': volume.id}
                    except IndexError:
                        continue
                if volDict:
                    inventory['_meta']['hostvars'][hostname]['ansible_host_volumes'] = volDict
            network_name = None
            if len(list(server.networks.keys())) > 1:
                for nn in server.networks.keys():
                    if 'internal' in nn:
                        network_name = nn
                    else:
                        inventory['_meta']['hostvars'][hostname]['public_host'] = server.networks[nn][0]
            for network in server.addresses.items():
                for a in network[1]:
                    if a['OS-EXT-IPS:type'] == 'floating':
                        inventory['_meta']['hostvars'][hostname]['ext_ip'] = a['addr']
            if network_name == None:
                try:
                    network_name = list(server.networks.keys())[0]
                except:
                    print("An error occurred while processing ", server)
            try:
                inventory['_meta']['hostvars'][hostname]['ansible_host'] = server.networks[network_name][0]
            except:
                print("An error occurred while processing ", server)
        else:
            continue
    return inventory


def merge(i, j):
    for k in i.keys():
        v = i[k]
        if k in j:
            if isinstance(v, list):
                j[k].extend(v)
            if isinstance(v, dict):
                merge(i[k], j[k])
        else:
            j[k] = i[k]


if __name__ == "__main__":
    inventory = {}
    inventory['_meta'] = {'hostvars': {}}
    authDict = {}
    try:
        authDict['auth_url'] = os.environ['OS_AUTH_URL']
        authDict['username'] = os.environ['OS_USERNAME']
        authDict['password'] = os.environ['OS_PASSWORD']
        authDict['user_domain_name'] = os.environ['OS_USER_DOMAIN_NAME']
    except KeyError:
        print("Env Variables not set, Please run: source <openstack rc file>")
        sys.exit()
    md_key = "clustername"
    md_value = sys.argv[1]
    from keystoneclient import client as ksclient
    import keystoneclient
    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    # auth = v3.Password(username=userName, password=passwd, auth_url=authUrl,user_domain_name=domainName)
    auth = v3.Password(unscoped=True, **authDict)
    sess = session.Session(auth=auth)
    kc = ksclient.Client(session=sess)
    kc.include_metadata = False
    authmgr = keystoneclient.v3.auth.AuthManager(kc)
    projects = authmgr.projects()
    enabled_projects = [x for x in projects if x.enabled]
    inventory_list = Parallel(n_jobs=len(projects))(delayed(gatherInfo)(md_key, md_value, authDict, proj.id, inventory) for proj in enabled_projects)
    inventory = {}
    for i in inventory_list:
        merge(i, inventory)
    #for k, v in inventory.items():
    #    sorted_inventory={k:sorted(v)}
    for key in inventory:
        if key == '_meta':
            pass
        else:
            inventory[key].sort()
    yamlinventory = {}
    yamlinventory['all'] = {}
    yamlinventory['all']['children'] = {}
    yamlinventory['all']['children']['hostvars'] = {}
    yamlinventory['all']['children']['hostvars']['hosts'] = inventory['_meta']['hostvars']
    yamlinventory['all']['children']['hostvars']['vars'] = {'ansible_python_interpreter': '/usr/bin/python3'}
    for g in inventory.keys():
        if g != '_meta':
            hostdict = {}
            for h in inventory[g]:
                hostdict[h] = {}
            yamlinventory['all']['children'][g] = {}
            yamlinventory['all']['children'][g]['hosts'] = hostdict
    print(yaml.dump(yamlinventory))
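# Usage sketch (assumed invocation; the script itself only documents the openrc requirement):
#   source <project>-openrc.sh        # sets OS_AUTH_URL, OS_USERNAME, OS_PASSWORD, OS_USER_DOMAIN_NAME
#   ./openstack_inventory.py <clustername> > inventory.yml
#   ansible-playbook -i inventory.yml site.yml
# Servers are selected by matching the "clustername" metadata key and grouped via their
# "ansible_host_groups" metadata, as implemented in gatherInfo() above.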
#!/usr/bin/env python
from jinja2 import Template, Environment, FileSystemLoader
import itertools
import subprocess
import datetime
import os
import sys
import time
import socket
from subprocess import call
import re
import json


def grab_card_ids():
    # This method runs nvidia-smi to grab the card ids, then returns a list
    if not os.path.isfile("/bin/nvidia-smi"):
        print("nvidia-smi binary not found!")
        exit(1)
    cmd = ["/bin/nvidia-smi", "--query-gpu=pci.bus_id", "--format=csv,noheader"]
    p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    cards = []
    for line in p.stdout.readlines():
        bus_id_parts = line.rstrip().split(":")
        bus = int(bus_id_parts[1], 16)
        dev_func = bus_id_parts[2].split(".")
        device = int(dev_func[0], 16)
        function = int(dev_func[1], 16)
        card = "PCI:{}:{}:{}".format(str(bus), str(device), str(function))
        cards.append(card)
    return cards


def grab_card_boardname():
    if not os.path.isfile("/bin/nvidia-smi"):
        print("nvidia-smi binary not found!")
        exit(1)
    cmd = ["/bin/nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]
    cards = []
    p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    for line in p.stdout.readlines():
        line = line.rstrip()
        cards.append(line)
    return cards


def write_xorg_conf(cards):
    num_of_cards = len(cards) + 1
    boardname = (grab_card_boardname())[0]
    gpus = []
    file_loader = FileSystemLoader('/opt/generate-xorg/template')
    env = Environment(loader=file_loader)
    template = env.get_template('xorg.conf.j2')
    for i in range(1, num_of_cards):
        monitors = []
        screens = []
        res = list(itertools.combinations(cards, i))
        for j in range(i):
            monitors.append("Monitor" + str(j))
            screens.append("Screen" + str(j))
        for card in res:
            filename = "/etc/X11/xorg.conf." + str(i) + '_' + str(res.index(card))
            template.stream({'boardname': boardname, 'monitors': monitors, 'screens': screens, 'devices': card}).dump(filename)


if __name__ == "__main__":
    cards = grab_card_ids()
    write_xorg_conf(cards)
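# Usage sketch (assumed deployment, inferred from the paths used above): run as root on a GPU node
# with /opt/generate-xorg/template/xorg.conf.j2 installed. One xorg.conf.<count>_<index> file is
# written to /etc/X11/ for every combination of the detected GPUs, so a desktop session can pick
# the config matching the GPUs it was allocated, e.g.:
#   sudo /opt/generate-xorg/generate_xorg.py
#   ls /etc/X11/xorg.conf.*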
#!/bin/bash
echo $HOSTNAME
#!/bin/sh
mkdir /local_home
usermod -m -d /local_home/ec2-user ec2-user
./output_dir
3daprecon/0.0.1
3daprecon/1.0
3depict/0.0.15
3dslicer/4.10.2
3dslicer/4.6.0
3dslicer/4.8.1
abaqus/2016
abaqus/2019
abaqus/6.14
abinit/8.8.3
abricate/0.8.13
abyss/2.0.2
adapterremoval/2.3.1
adf/2019.104
adxv/1.9.12
afni/16.2.16
afni/17.0.11
align2rawsignal/2.0
allpathslg/52488
amber/18-multi-gpus
amber/18-parallel
amber/18-parallel-pmemd.gem
amber/18-serial
amber/18-single-gpu
amide/1.0.5
amira/2020.2
amira/6.3.0
amira/6.4.0
amira/6.5.0
anaconda/2018.12-Python3.7-gcc6
anaconda/2019.03-Python3.7-gcc5
anaconda/2020.07-Python3.8-gcc8
anaconda/4.3.1-Python3.5
anaconda/4.3.1-Python3.5-gcc5
anaconda/5.0.1-Python2.7-gcc5
anaconda/5.0.1-Python3.5-gcc5
anaconda/5.0.1-Python3.6-gcc5
anaconda/5.1.0-Python3.6-gcc5
analyze/12.0
analyze-temp/12.0
angsd/0.931
angsd/0.931-realsfs
ansys/18.1
ansys/19.1
ansys/19.2
ansys/20r2
ants/1.9.v4
ants/20190910
ants/2.2.0
ants/2.3.1
ants/2.3.4
any2fasta/0.4.2
apbs/3.0.0
apex/latest
apr/1.6.5
apr-util/1.6.1
argos/3.0.0-beta52
ariba/2.12.1
ariba/2.14.4
armadillo/9.200-rc1
arpack/2.1
arpack/3.1.3-2
arrayfire/3.7.1
ascp/3.5.4
ashs/1.0.0
astra-toolbox/1.9.9.dev4
atlas/3.10.2-gcc4
atlas/3.10.2-gcc5
atom/1.39.1
atomprobedevcode/1.0.0
attr/2.4.46-12
augustus/3.3.3
autodock_vina/1.1.2
automake/1.16.1
automake/1.4-p6
autometa/2019-09
avizo/2020.2
avizo/9.0.1
avizo/9.3.0
avizo/9.3.0.1
avizo/9.4.0
avizo/9.5.0
avizo/9.7
axel/2.12
bamsurgeon/1.2
bamtofastq/1.2.0
bamtools/2.4.1
barrnap/0.9
bart/0.4.04
bart/0.4.04-cuda9.0
bayenv/2.0
bayesass/1.3
bayesass/3.04
bayesass/3.0.4-snps
bayescan/2.1
baypass/2.2
bbcp/17.12
bbmap/38.81
bcbtoolkit/4.0.0
bcftools/1.11
bcftools/1.6
bcftools/1.7
bcftools/1.8
bcl2fastq/2.19.1
beagle/2.1.2
beagle/3.1.2
beast1/1.10.0
beast1/1.8.4
beast2/2.4.7
beast2/2.4.8
beast2/2.5.0
bedtools/2.26.0
bedtools/2.26.0-gcc5
bedtools/2.27.1-gcc5
bedtools/2.29.2
bgen/1.1.4
bidscoin/2.2
bidscoin/3
bids-validator/1.3.1
bids-validator/2019.01
bigdatascript/v0.99999e
bigwigtowig/377-0
bilm-tf/1.0
biobambam/0.0.191
biobambam2/2.0.146
biscuit/0.2.2
biscuit/0.3.8.20180515
bismark/v0.19.1
bison/2.7.1
blas/3.8.0-gcc5
blas/3.8.0-gcc5-pic
blast/2.2.30
blast/2.3.0
blast/2.7.1
blast+/2.9.0
blender/2.81
blender/2.90.1
bolt-lmm/2.3.2
bolt-lmm/2.3.4
boost/1.46.0-gcc5
boost/1.46.1-gcc5
boost/1.52.0-gcc5
boost/1.58.0
boost/1.58.0-gcc5
boost/1.62.0
boost/1.62.0-gcc4
boost/1.67.0-gcc5
boost/1.72.0-gcc8
bowtie/1.1.2
bowtie2/2.2.9
bowtie2/2.3.5
bracken/2.5
brain_age/v1.0_18Jan2018
breseq/0.29.0
breseq/0.33.2
bsoft/1.9.2
bsoft/2.0
busco/3.0.2
buster/20170508
bwa/0.7.12
bwa/0.7.17-gcc5
bwa-meth/0.2.2
bzip2/1.0.6
cactus/1.0.0
caffe/1.0.0
caffe/1.0.0-protbuf32
caffe/caffe-matlab
caffe/caffe-tsn
caffe/deepvistool
caffe/latest
caffe/rc4
caffe2/0.8.1
caffe_enet/1.0
caffe_unet/1.0
caffe_unet/18.04
caffe_unet/2.0
camino/2020-09-21
canu/1.7.1
caret/5.65
caw/0.2.4
cblas/20032302-gcc5
ccp4/7.0
ccp4/7.0.072
ccp4/ccp4i
ccp4/ccp4i2
ccp4/ccp4mg
ccp4/coot
ccp-em/1.3.0
cdhit/4.8.1
cdo/1.9.8
cellprofiler/2.2.0
cellprofiler/3.1.5
cellprofiler/3.1.9
cellprofileranalyst/2.2.0
cellranger/2.0.1
cellranger/3.0.2
centrifuge/1.0.4-beta
checkm/1.1.3
checkv/0.7.0
chimera/1.10.2
chimera/1.11
chimera/1.13
chimera/1.14
chimerax/0.6
chimerax/0.8
chimerax/0.91
chimerax/0.93
chrome/68
chrome/69
chrome/75
chrome/77
chrome/78
chrome/80
chrome/80.0.3987.163
chrome/88.0.4324
chrome/default
chuffed/0.10.3
circos/0.69-6
cistem/1.0.0-beta
clairvoyante/1.02
clamms/1.1
clonalframeml/1.11
cloudcompare/2.11.2
cloudstor/2.3.1-1.1
cloudstor/2.4.1
cloudstor/2.4.2
clustal-omega/1.2.4
cmake/2.8.12.2
cmake/2.8.12.2-gcc5
cmake/3.10.2-gcc4
cmake/3.10.2-gcc4-system
cmake/3.10.2-gcc5
cmake/3.15.1-gcc4-system
cmake/3.15.1-gcc5
cmake/3.15.4-gcc8
cmake/3.5.2
cmake/3.5.2-gcc4
cmake/3.5.2-gcc5
cmkl/9.1.023
cnvkit/0.9.5
cnvnator/0.4.1
colmap/3.6
comsol/5.2a
comsol/5.4
comsol/5.4-ee
conda-install/latest
connectome/1.2.3
convert3d/1.0.0
coot/0.8.9.1
coventormp/1.002
cp2k/5.1.0
cp2k/6.1.0
cp2k/8.1.0
cplex/12.10.0
cplex/12.6
cplex/12.6.3
cplex/12.7.1
cplex/12.8.0
cpmd/3.17.1
cpmd/4.3
crisprcasfinder/1.05
crispresso/1.0.13-gcc5
crossmap/0.3.5
crossmap/0.3.6
cryoef/1.1.0
cryo-em-processing-tool/0.1
cryolo/1.0.0
cryolo/1.1.3
cryolo/1.3.1
cryolo/1.4.0
cryolo/1.4.1
cryolo/1.5.3
cryolo/1.5.6
cryolo/1.6.0
cryolo/1.7.2
cryosparc/beta
cryosparc/cryosparc-cluster
cryosparc/v2
crystallography/0.0.3
cst/2017
ctffind/4.0.17
ctffind/4.1.10
ctffind/4.1.13
ctffind/4.1.14
ctffind/4.1.3
ctffind/4.1.4
ctffind/4.1.8
ctftilt/latest
cuda/10.0
cuda/10.1
cuda/11.0
cuda/4.1
cuda/4.1.bajk
cuda/6.0
cuda/7.0
cuda/7.5
cuda/8.0
cuda/8.0.61
cuda/8.0-DL
cuda/9.0
cuda/9.1
cudadeconv/1.0
cudalibs8to9/0.1
cudnn/5.1
cudnn/5.1-DL
cudnn/7.1.2-cuda8
cudnn/7.1.3-cuda9
cudnn/7.3.0-cuda9
cudnn/7.6.5.32-cuda10
cudnn/7.6.5-cuda10.1
cudnn/8.0.5-cuda10.1
cudnn/8.0.5-cuda11
cufflinks/2.2.1
cunit/2.1.3
cutadapt/0.16
cutadapt/2.5
cutadapt/2.7
cytoscape/3.4.0
dada2/1.14
daris-utils/1.0
darknet/alexey
darknet/darknet_yolo_v3
darknet/latest
dcm2niix/1.0.20200331
dcm2niix/latest
dcmtk/3.6.3
deep-complex-networks/2017
deepemhancer/2020-09-09
deepgraph/0.2.3
deeplabcut/latest
deepmedic/0.6.1
deepmedic/0.7.0
deeptools/3.1.2
deeptools/3.1.3
deepvariant/0.8-cpu
deepvariant/0.8-gpu
dense3dcrf/20160527
detectron/20180322
dftbplus/18.2
dftd3/0.9
dials/1.12.1
dials/1.5.1
diamond/0.9.22
dicomnifti/2.32.1
diyabc/2.1.0
dke/latest
dke/latest-ft
dlib/19.20
dmtcp/2.5.2
dominate/2.3.5
dos2unix/7.4.0
dragondisk/1.0.5
drishti/2.6.3
drishti/2.6.4
drishti/ansto
drmaa/1.0.7
drmaa/1.1.0
dsistudio/latest
dti-tk/2.3.1
dtk/0.6.4.1
dynamo/1.1.178
dynamo/1.1.451
dynet/2.1-cpu
dynet/2.1-gpu
ea-utils/1.1.2
ea-utils/1.1.2-gcc5
eclipse/4.7.3a
eclipse/4.8
effoff/0.2.1
eigen/2.0.17
eigen/3.2.9
eigen/3.3.0
eigen/3.3.7
eigensoft/7.2.1
eiger2cbf/1.0
elastix/5.0.0
elf/1.0
eman/2.12
eman/2.2
eman/2.22
eman/2.3
eman/2.3.1
eman/2.9
emapaccess/1.0
emap-galaxy-shortcut/1.0.0
emap-mytardis-shortcut/1.0.0
emap-wiki-shortcut/0.0.1
emboss/6.6.0
emclarity/1.0.0
emclarity/1.4.3
emspring/spring_v0-84-1470
emspring/spring_v0-84-1470_mlx
epacts/3.3.2
exciting/nitrogen
exonerate/2.4.0
exploredti/4.8.6
fastani/1.1
fastml/3.1
fastml/3.11
fastp/0.20.0
fastqc/0.11.7
fastqc/0.11.9
fastQValidator/0.1.1a
fastspar/0.0.7
faststructure/1.0
fastsurfer/c5894bd
fasttree/2.1.10
fastx-toolkit/0.0.13
fcsalyzer/0.9.12
fdtd/2020a-r1
fdtd/8.21.1933
feedback/1.0.1
ffmpeg/3.4.2
ffmpeg/4.3.1
fftw/3.3.4-gcc
fftw/3.3.5-gcc
fftw/3.3.5-gcc5
fgbio/0.9.0
figtree/1.4.3
fiji/20160808
fiji/20170223
fiji/20170530
fiji/current
fiji/current.bak
fiji/fiji-super-res
fiji/MMI-MHTP
filtlong/0.2.0
fix/1.064
fix/1.068
flash/1.2.11-gcc5
flashpca/2.0
flexbar/3.4.0
flye/2.3.5
flye/2.8
fmriprep/1.0.15
fmriprep/1.1.1
fmriprep/1.2.5
fmriprep/1.3.0_post2
fmriprep/1.4.0
fmriprep/1.4.1
fmriprep/1.5.8
fmriprep/20.2.1
foma/0.9.18
fooof/0.1.3
fooof/1.0.0
fouriertransform/0.2.3
fox/1.6.57
fpart/1.2.0
freebayes/0.9.9
freesurfer/20160922
freesurfer/5.3
freesurfer/6.0
freesurfer/6.0.0-brainvolstatsfixed
freesurfer/6.0-patch
freesurfer/7.1.0
freesurfer/devel-20171013
freesurfer/devel-20180612
freesurfer/devel-20190128
frustum/xflx1992
fsl/5.0.11
fsl/5.0.9
fsl/6.0.0
fsl/6.0.1
fsl/6.0.3
fsleyes/0.22.4
fsleyes/0.23.0
fsleyes/0.24.3
fsleyes/0.32.0
ftgl/2.1
fxtract/2.3
gamess/16srs1
gamess/16srs1-v2
gamess/2018r3
ganon/0.3.3
gap/4.8.10
gatan/free
gatan/uwa
gatk/3.4
gatk/3.7
gatk/4.0.1.1
gatk/4.0.11.0
gatk/4.1.2.0
gatk/4.1.9.0
gatktool/0.0.1
gauss/11.0
gauss/9.0
gaussian/g16a03
gaussian/g16a03_local
gautomatch/0.53
gautomatch/0.56
gautomatch/0.56_cuda10.1
gcat/e48bf8b
gcc/4.9.3
gcc/5.4.0
gcc/6.1.0
gcc/8.1.0
gcta/1.92.2beta
gctf/0.50
gctf/0.66
gctf/1.06
gctf/1.06_cuda8
gctf/1.06_cuda8-uow
gctf/1.08_cuda8
gctf/1.18
gctf/1.18_b2
gctf/1.18_b2_cuda9
gctf/1.18_cuda10.1
gctf/1.18_cuda8
gctf/1.18_cuda8-uow
gctf/1.18_cuda91
gdal/2.2.4
gdal/2.3.1
gdal/3.0.2
gdb/8.2.1
gdcm/2.6.6-gcc4
gdcm/2.6.6-gcc5
gd-devel/2.0.35
geant4/10.02.p03
geant4/10.03.p01
geant4/10.05.p01
geant4/10.6
gem/3.3
gemini/0.30.1
genemark/4.30
genesis/1.5.1
genesis/1.5.1-cpu
gengetopt/2.10
genometools/1.6.1
genomicconsensus/2.3.3
genotypeharmonizer/1.4.20
genrich/v0.6
geos/3.6
geos/3.6.4
geos/3.7.2
gephi/0.9.2
gflags/master
gflags/master-gcc4
ghostscript/9.26
gimp/2.8
gimp/2.8.22
gingerale/2.3.6
git/2.17.0
git/2.19.0
git/2.25.2
git/2.8.1
git-annex/6.20180227
glew/2.0-gcc4
glew/2.0-gcc5
glm/0.9.9.5
glog/master
glog/master-gcc4
glpk/4.60
gmp/6.1.2
gmsh/3.0.3
gnuparallel/20160822
gnuparallel/20190122
gnuplot/5.2.1
go/1.11.1
go/1.13.8
go/1.14.2
goctf/1.1.0
gpu_burn/0.9
gpu_burn/1.0
gpucomputingsdk/4.0.17
graphviz/2.30.1
graphviz/2.40.1
gromacs/2016.3-openmpi-cuda8.0
gromacs/2016.4-openmpi-cuda8.0
gromacs/2016.5-openmpi-cuda8.0-plumed
gromacs/2018.4-openmpi-cuda8.0
gromacs/2018.7-openmpi-cpu-only
gromacs/2018.7-openmpi-cuda-plumed
gromacs/2018-openmpi-cuda8.0
gromacs/2018-openmpi-cuda8.0-NVML
gromacs/2019.4-openmpi-cuda10.0
gromacs/2020.1-openmpi-cuda10.1
gromacs/5.1.4
groot/0.8.6
groot/1.0.2
gsl/1.15.13-system
gsl/2.2-gcc4
gsl/2.2-gcc5
gsl/2.2-system
gsl/2.5-gcc4
gst-devel/1.4.5
gst-libav/1.10.4
gst-libav/1.4.5
gtdb-tk/0.3.2
gtdb-tk/1.3.0
gubbins/2.3.2
gubbins/2.4.1
guppy/3.1.5-1
guppy/3.2.4
guppy/3.5.2-gpu
guppy/4.0.14-gpu
guppy/container
gurobi/7.5.1
gurobi/8.0.0
gurobi/9.0.0
gurobi/9.0.1
gurobi/9.1.0
gvcftools/0.17.0
h5toxds/1.1.0
hal/2.1
halper/2020-08-06
hapflk/1.4
haplomerger2/20180603
haystack_bio/0.5.5
hddm/0.6.0
hdf5/1.10.0-patch1
hdf5/1.10.5
hdfview/3.1.1
heudiconv/0.5.4
hisat2/2.1.0
hmmer/2.4i
hmmer/3.2.1
hmmer/3.3.1
holt-lab/20210205
horovod/0.16.4
hotspot/4.0.0
hpcx/2.5.0-redhat7.6
hpcx/2.5.0-redhat7.7
htop/2.0.1
htseq/0.10.0
htslib/1.7
htslib/1.9
htslib/1.9-gcc5
humann/2.0
huygens/16.10.1-p1
hyperspy/1.4
hyphy/2.5.0
hypre/2.11.2
hypre/2.15.0
icm/3.7-3b
icm/3.8.7
idl/8.6
idl/8.7
idl/8.7-nov
idl/8.8
idr/2.0.3
igv/2.3.81
igv/2.4.19
ihrsr++/v1.5
ilastik/1.2.0
ilastik/1.3.3
ilastik/1.3.3post3
ilastik/1.4.0b5
illumina-utils/2.6
illumina-utils/2.6-python3.7
imagej/20160205
imagemagick/7.0.5-7
imagemagick/7.0.8-23
imagemagick/7.0.8.23-native
imagemagick/7.0.9-27-gcc8
imagescope/11.2.0.780
imblproc/20190405
imod/4.8.54
imod/4.9.12
imod/4.9.9
imod-raza/4.7.12
imosflm/7.2.1
imosflm/7.2.2
impute2/2.3.1
infernal/1.1.4
intel/2015.0.090
intel/2016
intel/2017u4
intel/2018test
intel/2018u3
ior/3.2.1
iqtree/1.5.3
iqtree/1.6.10
iqtree/1.6.2
iqtree/2.0.4-rc
iqtree/2.0-rc1
ismapper/2.0
itasser/5.1
itk/4.10.0-gcc4
itk/4.10.0-gcc5
itk/4.10.0-gcc5-p1
itk/4.13.0-gcc4
itk/4.13.1-gcc4
itk/4.8.2-gcc4
itk/4.8.2-gcc5
itk/ansto
itksnap/3.3.x
itksnap/3.8.0
itksnap/3.8.0-beta
jags/3.3.0
jags/3.4.0
jags/4.3.0
janni/0.2
java/openjdk-1.14.02
java/openjdk-1.8.0_242
jbigkit/2.1
jdk/10-20180320
jdk/14
jellyfish/1.1.12
jellyfish/2.3.0
jellyfish/2.3.0-gcc5
jspr/2017-7-20
juicer/1.6.2
julia/0.6.4
julia/1.3.1
julia/1.5.3
kallisto/0.43.0
kaptive/0.5.1
kilosort/1.0
kindel/0.4.2
king/2.1.6
kleborate/0.2.0
kleborate/0.3.0
kma/1.3.0
kraken/1.1.1
kraken2/2.0.7-beta
krakenuniq/0.5.8
kronatools/2.7.1
kul_vbg/20201103
kul_vbg/e46effe
lammps/20180510
lammps/20181212
lammps/20200303
lammps/20200303-python3
lapack/3.6.1-gcc4
lapack/3.6.1-gcc4-opt
lapack/3.6.1-gcc5
lapack/3.8.0-gcc5
lapack/3.8.0-gcc5-pic
ldmap/28apr15
ldpred/1.0.6
leveldb/master
leveldb/master-gcc4
levelset/0.0.2
libcublas/10.2.1.243-cuda10
libertem/0.5.1
libertem/20190521
libffi/3.2.1
libffi-devel/3.0.13
libfuse/3.6.1
libgd/2.2.4
libgeotiff/1.4.2
libgit2/1.1.0
libharu/2.2.1
libint/1.1.4
libjpeg-turbo/1.4.2
libjpeg-turbo/1.5.1
libjpeg-turbo/1.5.1-shared
libmaus/0.0.196
libmaus2/2.0.704
libsmm/20150702
libssh2/1.9.0
libtiff/3.9.7
libtiff/4.0.10
libunwind/1.3.1
libuuid/2.23.2-43
libxc/4.1.0
libxp/1.0.2
libxsmm/1.9
libzip/0.10.1-8
liggghts/3.8.0
lighter/1.1.2
lkh/2.0.9
lmdb/latest
locarna/1.9.2.3
locuszoom/1.4
locuszoom/1.4.fixed
lofreq/2.1.3.1
lsd/0.3beta
lumpy-sv/0.2.13
macs/3.0.0a5
macs2/2.1.1.20160309
mafft/7.310
mageck/0.5.9.4
magicblast/1.5.0
magic-impute/1.5.5
magma/1.6.1
magma/2.0.2
mainmast/1.0
maker/3.01.03
mango/4.0.1
manta/1.5.0-gcc5
mantid/3.13.0
mantid/3.8.0
mantid/3.9.0
mapdamage/2.0.9
mapdamage/2.0.9-u1
mash/2.1
mash/2.1.1
mash/2.2
materialsstudio/18.1.0
mathematica/12.1.1
mathgl/1.11.2
mathgl/2.0.3
mathgl/2.3.3
matlab/r2012b
matlab/r2014a
matlab/r2014b
matlab/r2015b
matlab/r2016a
matlab/r2017a
matlab/r2017b
matlab/r2017b-caffe
matlab/r2018a
matlab/r2019a
matlab/r2019b
matlab/r2020a
mauve/20150213
maven/3.3.9
maxquant/1.6.5.0
mc/4.8.21
mcl/11-294
mcr2010b/1.0
megahit/1.1.3
megahit/1.2.4-beta
megahit/1.2.9
meld/0.4.14
melt/2.2.2
meme/5.0.1
merantk/1.2.1
mercurial/4.7.1
mesa/13.0.5
mesa/default
meshlab/2016.12-gcc5
meshlab/2019.03
meson/0.51.0
metabat/2.15.5
metabolic/4.0
metacache/1.1.1
metal/2011
metaphlan/2.0
metaphlan/3.0-sif
metaplotr/2018_09
metawrap/1.1.3
mevislab/2.8.1-gcc-64bit
miakat/4.2.6
minc-lib/2.2-git-gcc4
minc-tools/2.2
miniasm/0.3
miniconda3/4.1.11-python3.5
miniconda3/4.8.3-python3.8
minimap2/2.17-r954-dirty
minipolish/0.1.3
minizinc/2
minizinc/2.3.1
minizinc/2.5.2
mixcr/3.0.7
mkl/2018u3
mlst/2.15
mne/TF-201804
molden/5.7
mono/5.20.1.19
moose/1.0
moose/1.0-vtk
morphind/1.4
motif/2.3.4
motioncor2/20180924
motioncor2/20181020
motioncor2/20181020-cuda91
motioncor2/2.1
motioncor2/2.1.10-cuda8
motioncor2/2.1.10-cuda9.1
motioncor2/2.1.3.0-cuda101
motioncor2/2.1.3.0-cuda80
motioncor2/2.1.4.0-cuda101
motioncorr/2.1
motioncorr2/20160822
mpfr/3.1.5
mpifileutils/20170922
mpifileutils/20200701
mpip/3.4.1
mrbayes/3.2.6
mrbayes/3.2.6-mpi
mrf/0.2.2
mriconvert/2.1.0
mricrogl/1.0.20170207
mricrogl/20180623
mricron/06.2013
mricron/30apr2016
mriqc/0.14.2
mriqc/0.15.2.rc1
mriqc/0.9.7
mrpeek/preview2
mrtrix/0.3.15-gcc4
mrtrix/0.3.15-gcc5
mrtrix/0.3.16
mrtrix/20170712
mrtrix/3.0.0
mrtrix/3.0.1
mrtrix/3.0.2
mrtrix/3.0_rc3
mrtrix/3.0_rc3_latest
mrtrix3tissue/5.2.8
mrtrix3tissue/5.2.8-ubuntu
msm_hocr/3.0
multiwfn/3.8
mummer/3.23-gcc5
mummer/4.0.0.beta2-gcc5
muscle/3.8.31
mustem/5.3
mxtools/0.1
mydata/0.9.2
mydata/0.9.2-20201020
mydata-python/20200603
mykrobe/0.8.1
mytardis/0.1
namd/2.12-ibverbs-smp-cuda
namd/2.12-multicore
namd/2.13-multicore
namd/2.13-multicore-CUDA
namd/2.14-multicore
namd/2.14-multicore-cuda
nanconvert/latest
nanofilt/201807
nanopolish/0.10.1
nanopolish/0.11.1
nasm/2.15.03
nccl/2.4.7-cuda10.0
nccl/2.4.7-cuda9.1
nccl/master
nccl/master-gcc4
netcdf/4.4.1.1
netcdf/4.4.1.1-openmpi-1.10.7-mlx
netcdf/4.7.0
netcdf/4.7.1-intel
neuro_workflow/2017v2
neuro-workshop/20191115
new-fugue/2010-06-02
newick-utils/1.6
nextgenmap/0.5.5
ngsqctoolkit/2.3.3
nibabel/2.3.3
niftilib/2.0.0
nighres/1.1.0b1
niistat/9.oct.2016
nilearn/0.6.2
ninja/1.9.0
nis-elements-viewer/4.20
nlopt/2.6.1
nlopt/2.6.1-gcc4
nlopt/2.6.1-gcc5
nn/0.2.4
novactf/03.2018
nsight/2019.5.0
nullarbor/2.0.20181010
objexport/0.0.4
octave/4.2.2
octopus/8.4
octopus/8.4-parallel
omero.insight/5.5.10
omsimulator/2.0.1
openbabel/2.4.1
openblas/0.2.20
openbugs/3.2.3
opencv/3.4.1
opencv/3.4.10-gcc5
opencv/3.4.11
opencv/3.4.1-gcc4
opencv/4.1.0
opencv/4.4.0
openface/2.2.0
openfoam/4.1
openfoam/5-paraview54
openfoam/5.x
openfoam/v1912
openjpeg/2.3.0
openjpeg/2.3.1
openmodelica/1.14.2
openmpi/1.10.3-gcc4-mlx
openmpi/1.10.3-gcc4-mlx-cuda75
openmpi/1.10.3-gcc4-mlx-verbs
openmpi/1.10.3-gcc4-mlx-verbs-cuda75
openmpi/1.10.3-gcc5
openmpi/1.10.3-gcc5-mlx
openmpi/1.10.7-1.mlx
openmpi/1.10.7-intel
openmpi/1.10.7-mlx
openmpi/1.10.7-mlx-intel
openmpi/2.1.6-mlx-intel
openmpi/3.1.4-mlx
openmpi/3.1.6-ucx
opennmt-py/0.7.0
openpose/1.6.0
openrefine/3.1
openslide/3.4.1
orange3/ansto
orca/4.0.1
orca/4.2.1
orca/4.2.1-216
orfm/v0.7.1
osg/3.6.5
otpo/1.0
packer/1.3.5
paleomix/1.2.13.4-python2
paml/4.9
panaroo/1.1.2
panaroo/1.2.4
pandoc/2.7.3
paraview/4.0.1
paraview/5.6.0
paraview/ansto
parliament2/0.1.11
parsyncfp/1.67
partitionfinder2/2.1.1
pastml/1.0
pbh5tools/2018-12-13
pbzip2/1.1.13
peakseq/1.3.1
perl/5.24.0
perl/5.28.0
perl/5.30.1
petsc/3.10.1-gcc5
petsc/3.12.1
petsc/3.13.2-gcc5
pgap/3958
pgi/2019
pgi/2020
phate/0.4.4
phenix/1.11.1
phenix/1.15.1
phenix/1.15.2
phenix/1.18.2
phreeqc/3.5.0
phyloflash/3.4
phyml/3.1
picard/2.19.0
picard/2.9.2
picrust2/2.1.4_b
picrust2/2.2.0_b
pigz/2.3.3
pigz/2.3.4
pilon/1.22
pindel/0.6.3-gcc5
plasmidfinder/2.1
plink/1.7
plink/1.9
plink/1.90b6.10
plink/2.0-alpha
plinkseq/0.10
plumed/2.5.0
pmix/3.1.2
pmix/v2.2
pointless/1.10.28
posgen/0.0.1
posminus/0.2.3
pplacer/v1.1.alpha19
prank/170427
prismatic/1.1
prismatic-cpu/1.1
prodigal/2.6.3
proj/4.9.3
proj/5.1.0
proj/6.2.1
prokka/1.13.3
prokka/1.14.5
prokka/1.14.6
protobuf/master
protobuf/master-gcc4
protomo/2.4.2
psi4/v1.1
psi4/v1.3.2
psortb/3.0
psortm/3.0
pulchra/3.06
purge_haplotigs/1.1.0
pv/1.6.6
py4dstem/0.11.5
py4dstem/0.3
pybids/0.9.1
pycharm/2018.3.3
pydeface/1.1.0
pyem/v0.1
pyem/v0.1-201806
pyem/v0.3
pyem/v0.5
pymol/1.8.2.1
pymol/1.8.6
pymol/2.4.0a0
pyprismatic/1.1.16
pyprismatic/1.2.1
pypy/7.0.0-3.6
pysam/0.15.2-python2
python/2.7.11-gcc
python/2.7.12-gcc4
python/2.7.12-gcc5
python/2.7.15-gcc5
python/2.7.17-gcc8
python/2.7.18-gcc5
python/2.7.18-ucs4-gcc5
python/3.5.2-gcc
python/3.5.2-gcc4
python/3.5.2-gcc5
python/3.6.2
python/3.6.2-static
python/3.6.6-gcc5
python/3.7.2-gcc6
python/3.7.3-system
python/3.8.5
python/3.8.5-gcc8
python/3.8.5-gcc8-static
pytom/0.971
pytorch/1.0-cuda10
pytorch/1.1-cuda10
pytorch/1.3-cuda10
pytorch/1.5-cuda10
pytorch/1.6-cuda10
pyxnat/1.1.0.2
pyxnat/20170308
qatools/1.2
qctools/v2.0-beta
qgis/3.9.0
qhull/2003.1
qhull/2015.2
qiaseq-dna/1.0
qiaseq-dna/14.1
qiime1/1.9.1
qiime2/2017.9
qiime2/2018.11
qiime2/2018.2
qiime2/2018.4
qiime2/2019.1
qiime2/2019.4
qiime2/2019.7-q2_scnic
qiime2/2019.7-q2_scnic_2
qiime2/2020.8
qit/2148
qt/5.7.1-gcc5
qt5-qtwebkit/5.9.1
quicktree/2.0
quicktree/2.5
quit/1.1
quit/2.0.2
qupath/0.2.0-m4
R/3.3.1
R/3.4.3
R/3.5.0
R/3.5.1
R/3.5.2-openblas
R/3.5.3-mkl
R/3.6.0-mkl
R/3.6.2-mkl
R/4.0.0-openblas
racon/1.3.1
raremetal/4.15.1
raven/1.1.10
raxml/8.2.12
raxml/8.2.9
raxml-ng/1.0.0
razers3/3.5.8
rclone/1.49.3
rdf-kd/0.0.1
rdkit/2019.03.3.0
readline/7.0
relion/1.4
relion/2.02
relion/2.0.6
relion/2.0beta
relion/2.1
relion/2.1.b1
relion/2.1.b2
relion/2.1-openmpi-1.10.7-mlx
relion/3.0-20181109-cuda80
relion/3.0-20181109-cuda91
relion/3.0-20190115
relion/3.0.5
relion/3.0.5-uow
relion/3.0.6
relion/3.0.6-uow
relion/3.0.7
relion/3.0.7-uow
relion/3.0.7-uow-cuda10.1
relion/3.0.7-uow-mc2.1.3.0
relion/3.0-beta
relion/3.0-stable
relion/3.0-stable-cuda91
relion/3.0-stable-uow
relion/3.0-uow-20180904
relion/3.0-uow-20180917
relion/3.0-uow-20181109-cuda80
relion/3.0-uow-20181109-cuda91
relion/3.0-uow-20190115
relion/3.1.0
relion/3.1.0-uow
relion/3.1.0-uow-v2
relion/3.1_beta
relion/3.1_beta-20191105
relion/3.1_beta-20191113
relion/3.1_beta-20200109
relion/3.1_beta-latest
relion/ver3.1_20200904
relion/ver3.1_20200925
relion/ver3.1_20201028
relion/ver3.1_20201102
repeatmasker/4.1.1
resmap/1.1.4
resmap/1.1.5
resmap/1.9.5
rest/1.8
rest/1.8-matlab2017a.r6685
retools/1.3
rgi/5.1.0
rings/1.3.3
r-launcher/0.0.1
rmblast/2.10.0
rnammer/1.2
roary/3.11.2
roary/3.12.0
robex/1.2
root/5.34.32
root/6.22.02
rosetta/2018.09
rosetta/2020.08
rosetta/2020.37-mpi
rsem/1.3.0
rseqc/3.0.0
rstudio/1.0.143
rstudio/1.0.44
rstudio/1.1.414
rstudio/1.1.463
rstudio/1.1.463-r3.5.3-mkl
rstudio/1.1.463-r3.6.0-mkl
rstudioserver_epigenetics/1.0
rstudioserver_epigenetics/1.0-20171101
rsync/3.1.3
rtk/ansto
saintexpress/3.6.3
salmon/0.14.1
salome/9.2.0
sambamba/0.8.0
samblaster/0.1.26
samclip/0.2
samtools/0.1.18
samtools/1.10
samtools/1.3.1
samtools/1.6
samtools/1.7
samtools/1.7-gcc5
samtools/1.9
samtools/1.9-gcc5
sas/9.4
sbt/0.13.15
sbt/1.2.1
scalapack/2.0.2
scipion/2.0
scipion/devel
scipion/devel-20170327
scipion/v1.0.1_2016-06-30
scipion/v1.1
scipion/v1.1.1
scipion/v1.2
scipion/v1.2.1
scipion/v1.2.1_2018-10-01
scrappie/1.4.1
sdm_1d_calculate/2.0.2
sdm_1d_plot/0.0.4
sdm_2d_calculate/2.0.2
sdm_2d_plot/0.0.4
seacr/1.3
segadapter/1.9
seqgen/1.3.4
seqtk/1.3
shapeit/v2_r837
shapeit/v2_r904
shovill/1.0.4
sidesplitter/120220
sidesplitter/20201028
simnibs/2.0.1g
simnibs/3.1.2
simple/2.1
simple/2.5
simul-atrophy/12-09-2017
simul-atrophy/rjbcompilepetscmaster-04032020
simul-atrophy/RJBCompilePetscMaster-190220
singlem/0.12.1
singularity/2.3.1
singularity/2.4.2
singularity/2.4.5
singularity/2.5.2
singularity/3.0
singularity/3.0.1
singularity/3.0.2
singularity/3.1.0
singularity/3.2.0
singularity/3.2.1
singularity/3.4.0
singularity/3.5.2
singularity/3.5.3
singularity/3.7.1
singularity/d3d0f3fdc4390c7e14a6065543fc85dd69ba42b7
situs/3.1
ska/1.0-e1968f0
skesa/2.2.1
skesa/2.3
skewer/20170212
slamdunk/latest
slim/3.2
slim/3.3.1
slurm/17.11.4
smafa/0.5.0
smcounter/10apr2017
smux/0.0.1
snap/2013-11-29
snappy/master
snappy/master-gcc4
snippy/4.3.8
snippy/4.4.5
snoscan/1.0
snp-dists/0.6.3
snpeff/4.3t
snpm/13
snp-sites/2.5.1
soapdenovo2/2.04-r241
sortmerna/2.1b
sourcetracker/2.0.1
spades/3.12.0
spades/3.13.1
sparseassembler/1.0
sparsehash/2.0.3
spectra/0.8.1
speedseq/0.1.2
spider/21.11
spm12/matlab2015b.r6685
spm12/matlab2018a.r6685
spm12/matlab2018a.r7487
spm8/matlab2015b.r6685
spm8/matlab2017a.r6685
spring/0.86.1661
spss/26
sqlite3/3.30.1
squashfs-tools/4.3-0.21
squashfuse/0.1.103
sra-tools/2.7.0
sra-tools/2.9.2
sra-tools/2.9.4
sra-tools/2.9.6
srst2/0.2.0
srst2/0.2.0-2019
stacks/2.4
star/2.5.2b
star-fusion/1.9.1
stata/14
stata/14.2
stata/16
stisuite/3.0
strelka/2.8.4
stringtie/1.3.5
stringtie/1.3.6
structure/2.3.4
subread/1.5.1
subread/2.0.1
subversion/1.9.5
suitesparse/5.4.0
sumo/1.5.0
superlu/3.1
surfice/7_feb_2017
svd/1.4
svs/8.8.3
swig/3.0.12
swig/4.0.1
synopsys/3.1
tannertools/2016.1
tannertools/2016.2
tapsim/v1.0b_r766
tbb/20180312oss
tempest/1.5
tensorflow/1.0.0-python2.7.12-gcc5
tensorflow/1.10.0-pytorch
tensorflow/1.10.0-pytorch-all
tensorflow/1.10.0-pytorch-keras
tensorflow/1.12.0-python2.7.12-gcc5
tensorflow/1.12.0-python3.6-gcc5
tensorflow/1.13.1-gdal
tensorflow/1.14.0-keras
tensorflow/1.14.0-keras-pydicom
tensorflow/1.15.2-gpu
tensorflow/1.15.2-python3.7.3-gcc8
tensorflow/1.3.0-python2.7.12-gcc5
tensorflow/1.4.0-python2.7.12-gcc5
tensorflow/1.4.0-python3.6-gcc5
tensorflow/2.0.0-beta1
tensorflow/2.0.0-gpu
tensorflow/2.2.0
tensorflow/2.3.0
tensorflow/2.4.1
tensorrt/6.0.1.5-cuda10
tensorrt/7.0.0.11-cuda10
tensorrt/7.2.1
terastitcher/20171106
texlive/2017
tiff/4.0.8
tigervnc/1.8.0
tmap/3.0.1
tomowarp2/03f3ee8
toothmaker/0.64
topaz/1.0
topaz/latest
tophat/2.1.1
tracer/1.6
trackvis/0.6.1
tractseg/2.0
transdecoder/5.5.0
trf/4.09.1
trim_galore/0.4.5
trim_galore/0.5.0
trimmomatic/0.38
trinity/2.8.5
trinity/2.8.5-gcc5
turbovnc/2.0.2
turbovnc/2.1.0
tvips-tools/0.0.3
ucsc-genome-tools/201806
ucx/1.6.1
udunits2/2.2.20-2
ufo-kit/ansto
umap/0.3.8
umi-tools/0.5.5-python2
umi-tools/0.5.5-python3
unblur/1.0.2
underworld/2.3.0
underworld/2.8.0b
underworld/2.9.1b
underworld/2.9.4b
unicycler/0.4.7
unimelb-mf-clients/0.2.7
unimelb-mf-clients/0.3.2
unrar/5.0
v8/3.14.5.10-25
vaa3d/3.601
valgrind/3.13
varscan/2.3.9
vasp/5.4.4
vasp/5.4.4.eyk
vcftools/0.1.15
vdjtools/1.2.1
vegas2/v02
velvet/1.2.10
velvet/1.2.10-modified
vep/90
vep/94
viennarna/2.4.15
vigra/1.9.0
vim/8.0.0596
vim/8.2
vim/8.2-container
viptreegen/1.1.2
virsorter/1.0.6
virtualgl/2.5.0
virtualgl/2.5.2
virtualgl/2.6.2
visit/2.12.3
vmd/1.9.3
vmd/1.9.4
volview/3.4
voro++/0.4.6
vscode/1.39.2
vscode/1.53.2
vsearch/2.13.6
vt/0.57
vtk/5.10.1
vtk/5.10.1-gcc4
vtk/7.0.0
vtk/7.0.0-gcc5
wasp/0.3.0
weblogo/3.7
wfu_pickatlas/3.0.5b
wgsim/0.3.1-r13
workspace/4.0.2
wtdbg2/2.5
wxgtk/3.0.2
wxwidgets/3.0.3
x2goclient/4.1.2.1
x2goclient/4.1.2.2
xcpengine/1.2.3
xds/20170302
xds/monash
xds/mxbeamteam
xjview/9.0
xjview/9.6
xjview/9.7
xnat-desktop/0.96
xnat-desktop/1.0.40
xnatpy/0.3.18
xnat-upload-assistant/1.1.3
xnat-utils/0.2.1
xnat-utils/0.2.5
xnat-utils/0.2.6
xnat-utils/0.4.5
xnat-utils/0.4.6
xnat-utils/0.4.9
xnat-utils/0.5.3
xnat-utils/0.5.5
xvfb/1.19.3
yade/1.20.0-cpu
yade/1.20.0-gpu
yade/2019-06-20
yade/2019-06-20-cpu
yade/yade-daily-may-2019
yasm/1.2.0-4
yasm/1.3.0
zetastitcher/0.3.3
zlib/1.2.11
zoem/11-166
zoltan/3.83
zopfli/1.0.3
zstd/1.4.0
#!/bin/bash
# Generate report for applications that are to be deprecated
name=$1
ver=$2
report=./${name}_${ver}.txt
echo 'module logging stat: ' > $report
./usage_stat $name $ver 2>&1 | tee -a $report
echo '' >> $report
echo 'software that depends on '$name/$ver >> $report
./listdeps $name $ver
cat $name/$ver >> $report
rm $name/$ver
rmdir $name
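# Example (assumed invocation): ./application_report abaqus 6.14
# writes ./abaqus_6.14.txt containing the usage_stat output plus the dependants collected
# by ./listdeps, then removes the temporary ./abaqus/6.14 dependency file and directory.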
#!/bin/bash
inputfile=$1
for app in $(cat $inputfile); do
    version=$(ls /usr/local/Modules/modulefiles/$app)
    mtime=360 ./application_report $app $version
done
#!/bin/bash
# NUM_MODULE does not necessarily correspond to the output of `ls -l $TESTCASE_DIR | wc -l` because
# 1. one folder is created for each software package, but NUM_MODULE counts software/version combinations
# 2. if a package does not modify the PATH variable then no bintest is generated for it; the number of such packages is printed at the end
NUM_MODULE=${1:-100}
TESTCASE_DIR='./tests'
APPLICATION_LIST='./application_list.txt'
# USAGE_REPORT=software_usage.txt
# if [ ! -f $USAGE_REPORT ]; then
#     software_usage $USAGE_REPORT
# fi
echo '#!/bin/bash
name=$( realpath $0 | xargs dirname | xargs basename )
ver=${1:-"default"}
bin_dirs=$(module show $name/$ver 2>&1 | sed -n "s/.* PATH \(.*\)/\1/p")
NAME_EXEC_EXIST=false
for bin in $bin_dirs; do
    for i in $(find $bin -maxdepth 1 -executable -type f ); do
        if [[ $(basename $i) == $name ]]; then
            $i --version;
            exit $?
        fi
    done
done
' > $TESTCASE_DIR/bintest
echo "Starting to generate bintest for $NUM_MODULE packages"
declare -i NUM_LIB=0
declare -i GENERATED_NUM=0
IFS_orig=$IFS
IFS=$'\n'
for i in $(head -n $NUM_MODULE $APPLICATION_LIST); do
    m=$(sed -n 's/\(.*\)\/\(.*\)/\1 \2/p' <<< $i)
    name=$(cut -f1 -d' ' <<< $m)
    ver=$(cut -f2 -d' ' <<< $m)
    MODIFIED_PATH=$(module show $name/$ver 2>&1 | sed -n 's/.* PATH \(.*\)/\1/p')
    if ! [ -z "${MODIFIED_PATH}" ]; then
        mkdir -p $TESTCASE_DIR/$name
        cp $TESTCASE_DIR/bintest $TESTCASE_DIR/$name/bintest
        chmod u+x $TESTCASE_DIR/$name/bintest
        GENERATED_NUM+=1
    else
        NUM_LIB+=1
    fi
done
IFS=$IFS_orig
echo "Successfully generated bintests for $GENERATED_NUM applications"
echo "There are $NUM_LIB libraries (they do not modify PATH), so no testcases were generated for them"
#!/bin/bash
TARGET_MODULEPATH="/usr/local/Modules/modulefiles"
OUTPUT_PATH="application_list.txt"
echo "" > $OUTPUT_PATH
MODULEPATH=$TARGET_MODULEPATH module avail -t 2>&1 | python -c "
import sys
with open('$OUTPUT_PATH', 'w') as fout:
    for line in sys.stdin:
        linelist = line.split('/')
        if len(linelist) != 2:
            continue
        linelist[-1] = linelist[-1].replace('(default)','')
        fout.write('/'.join(linelist))
"
#!/bin/bash
# This script does not detect implicit dependency
# For example, if A -> B -> openmpi/1.10.7-mlx, only module B will be detected
set -e
software=$1
if [ -z $1 ]; then
    echo 'please specify target software'
    echo 'input can be either '
    echo 'listdeps <name>'
    echo 'OR'
    echo 'listdeps <name> <version>'
    exit 1
fi
version=$2
if [ ! -z ${version} ]; then
    version=( $version )
else
    version=( $(ls /usr/local/Modules/modulefiles/$software) )
fi
mkdir -p $software
for ver in ${version[@]}; do touch $software/$ver; done
for i in $(ls -d /usr/local/*/); do
    name=$(basename $i)
    if [[ -r "/usr/local/Modules/modulefiles/$name" ]]; then
        vers=$(ls /usr/local/Modules/modulefiles/$name)
    fi
    for ver in ${vers[@]}; do
        load_ver=$(module show $name/$ver 2>&1 | sed -n "s/module.*load\ ${software}\/\(.*\)/\1/p")
        if [ ! -z $load_ver ] && [ -f $software/$load_ver ]; then
            echo $name/$ver >> $software/$load_ver
        fi
    done
done
set +e
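# Example (assumed invocation): ./listdeps openmpi 1.10.7-mlx
# scans every modulefile under /usr/local/Modules/modulefiles and appends any module that does
# "module load openmpi/1.10.7-mlx" to the file ./openmpi/1.10.7-mlx, which application_report then reads.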
#!/usr/bin/env python3
# run module load python/3.8.5-gcc8-static first
from pathlib import Path
import os
import pandas as pd
import time
import datetime
import collections
import numpy as np

nweek = 12
modulepath = Path('/usr/local/Modules/modulefiles')
softwares = list(modulepath.glob('*/'))
modulefiles = [list(i.glob('**/[!.]*')) for i in softwares if i.is_dir() and os.access(i, os.R_OK)]
lastm = {datetime.datetime.fromtimestamp(j.stat().st_mtime): j for i in modulefiles for j in i}
today = datetime.date.today()
week = datetime.timedelta(weeks=1)
interval = np.array([today - week*i for i in range(nweek+1)][::-1])


def quantize_week(date):
    week = interval[:-1][(interval[:-1] <= date) & (date < interval[1:])]
    return week.item()


byweek = collections.defaultdict(list)
for k, v in lastm.items():
    mod_day = datetime.date(*k.timetuple()[:3])
    if mod_day >= interval[0]:
        byweek[quantize_week(mod_day)].append(v)

avg_install = np.mean([len(i) for i in byweek.values()])
print(avg_install)
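# Example (assumed invocation and script name): after `module load python/3.8.5-gcc8-static`:
#   ./weekly_install_rate.py
# prints the mean number of modulefiles whose mtime falls in each of the last 12 weeks,
# i.e. a rough "modules installed per week" figure.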
#!/bin/bash
# each application will create a folder under OUTPUT_DIR
declare -i NUM_MODULE
declare -i DEFAULT_NUM_MODULE
declare -i TIMEOUT
declare -i DEFAULT_TIMEOUT
DEFAULT_OUTPUT_DIR='./output_dir'
DEFAULT_NUM_MODULE=100
DEFAULT_TESTCASE_DIR='./tests'
DEFAULT_TIMEOUT=10
DEFAULT_APPLICATION_LIST='./application_list.txt'
function usage {
    echo "Run smoke test"
    echo "Usage: $0"
    echo "    -a <list of applications to test>. This should point to a file with lines of '<name>/<version>'; it can be generated using either generate_modules_list or software_usage. Default: $DEFAULT_APPLICATION_LIST"
    echo "    -n <number of modules to test>. Specify a large number to test all applications. Default: $DEFAULT_NUM_MODULE"
    echo "    -o <output directory>. The report will be generated at OUTPUT_DIR/report.txt. Default: $DEFAULT_OUTPUT_DIR"
    echo "    -t <testcases directory>. Testcases will be loaded from this directory. Default: $DEFAULT_TESTCASE_DIR"
    echo "    -T <timeout period in seconds>. Default: $DEFAULT_TIMEOUT"
    exit 1
}
while getopts ":n:a:o:t:T:" opt; do
    case "$opt" in
        a)
            APPLICATION_LIST=$OPTARG
            ;;
        o)
            OUTPUT_DIR="$OPTARG"
            ;;
        n)
            NUM_MODULE=$OPTARG
            ;;
        t)
            TESTCASE_DIR=$OPTARG
            ;;
        T)
            TIMEOUT=$OPTARG
            ;;
        *)
            usage
            ;;
    esac
done

if [ -z $APPLICATION_LIST ]; then
    APPLICATION_LIST=$(realpath $DEFAULT_APPLICATION_LIST)
fi
if [ -z $OUTPUT_DIR ]; then
    OUTPUT_DIR=$(realpath $DEFAULT_OUTPUT_DIR)
fi
if [ -z $NUM_MODULE ]; then
    NUM_MODULE=$DEFAULT_NUM_MODULE
fi
if [ -z $TESTCASE_DIR ]; then
    TESTCASE_DIR=$(realpath $DEFAULT_TESTCASE_DIR)
fi
if [ -z $TIMEOUT ]; then
    TIMEOUT=$DEFAULT_TIMEOUT
fi
BINTEST=true
REPORT="$OUTPUT_DIR/report.txt"
FAILED_MODULE_LOGS="$OUTPUT_DIR/failed_modules.txt"
echo '' > $FAILED_MODULE_LOGS
echo '' > $REPORT
declare -i num_success=0
declare -i num_failed=0
declare -i num_notest=0
declare -i minor_error=0
function generate_output_directory {
    mkdir -p $OUTPUT_DIR
    touch $REPORT
    echo '' > $REPORT
    echo "Test report created at $REPORT"
}

# function generate_software_usage {
#     SOFTWARE_USAGE_PATH=software_usage.txt
#     if [ ! -f $SOFTWARE_USAGE_PATH ]; then
#         ./software_usage $SOFTWARE_USAGE_PATH
#     fi
# }
generate_output_directory
IFS_orig=$IFS
IFS=$'\n'
for i in $(head -n $NUM_MODULE $APPLICATION_LIST); do
    # m=$(sed -n 's/\(.*\)\/\(.*\) - \(.*\)/\1 \2/p' <<< $i)
    m=$(sed -n 's/\(.*\)\/\(.*\)/\1 \2/p' <<< $i)
    name=$(cut -f1 -d' ' <<< $m)
    ver=$(cut -f2 -d' ' <<< $m)
    printf "\n\n=========================\n"
    if [ -z $name ]; then
        continue
    fi
    echo "Testing $name/$ver ... "
    module purge
    module load $name/$ver
    mkdir -p $OUTPUT_DIR/$name
    echo '' > $OUTPUT_DIR/$name/$ver
    if [ ! -d $TESTCASE_DIR/$name ]; then
        echo "No testcase for $m found" | tee -a $REPORT
        num_notest+=1
    else
        ALL_EXEC=$(find $TESTCASE_DIR/$name -maxdepth 1 -executable -type f)
        module_failed=false
        for t_case in ${ALL_EXEC}; do
            # skip the generic bintest testcase when BINTEST is disabled
            if [ $(basename $t_case) == 'bintest' ] && ! $BINTEST; then break; fi
            orig_dir=$PWD
            cd $(dirname $t_case) # in case the testcase needs to compile some source from the directory
            timeout $TIMEOUT $t_case $ver &> $OUTPUT_DIR/$name/$ver
            exitcode=$?
            cd $orig_dir
            if [ $exitcode -eq 0 ]; then
                num_success+=1
            else
                if [ $exitcode -eq 1 ]; then
                    minor_error+=1
                else
                    num_failed+=1
                    module_failed=true
                fi
                echo "$m returned non-zero exitcode $exitcode for testcase $t_case" 2>&1 | tee -a $REPORT
            fi
        done
        if $module_failed; then
            echo "$name/$ver failed"
            echo "$name/$ver" >> $FAILED_MODULE_LOGS
        fi
    fi
    module unload $name/$ver
    printf "Done"
done
IFS=$IFS_orig
echo "Testcase summary" 2>&1 | tee -a $REPORT
echo "Failed: $num_failed" 2>&1 | tee -a $REPORT
echo "Success: $num_success" 2>&1 | tee -a $REPORT
echo "minor error: $minor_error, this is often due to executable not having --version option" 2>&1 | tee -a $REPORT
echo "Num applications that has no test case: $num_notest" 2>&1 | tee -a $REPORT
if [ "$num_success" -lt $(( $NUM_MODULE / 2 )) ]; then
echo "Failing because not enough tests succeeded" 2>&1 | tee -a $REPORT
exit 2
fi
[ "$num_failed" -eq "0" ]