...
 
Commits (2)
......@@ -137,7 +137,7 @@ def appendDayPhase(casefile, outFn = None, sepType = ','):
with open(outfn, mode='w', newline ='') as out_file:
fieldnames = reader.fieldnames#'bl_admission bl_ucstartday bl_costartday bl_crstartday bl_restartday bl_lastknownstatus bl_duration_inpatientstay'.split(' ') # bl_duration_icustay bl_duration_ventilation bl_observationalperiod
writer = csv.DictWriter(out_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL, fieldnames=fieldnames+'ci_d10 ci_d20 ci_death'.split(' '))
writer = csv.DictWriter(out_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL, fieldnames=fieldnames+'ci_d5 ci_d10 ci_d20 ci_death'.split(' '))
writer.writeheader()
for row in reader:
......@@ -152,7 +152,7 @@ def appendDayPhase(casefile, outFn = None, sepType = ','):
#out['ci_d0'] = getPhase(row, 0)
#out['ci_d1'] = getPhase(row, 1)
#out['ci_d5'] = getPhase(row, 5)
out['ci_d5'] = getPhase(row, 5)
out['ci_d10'] = getPhase(row, 10)
out['ci_d20'] = getPhase(row, 20)
out['ci_death'] = getDeath(row)
......
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This diff is collapsed.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -222,40 +222,52 @@ def addCalculatedFields(row):
phase10 = row[f'ci_d10']
phase20 = row[f'ci_d20']
phase5 = row[f'ci_d5']
row[f'ci_death_day10'] = row['ci_death'] == row['ci_d10'] if row['ci_death'] != '*' else '*'
row[f'ci_death_day20'] = row['ci_death'] == row['ci_d20'] if row['ci_death'] != '*' else '*'
row[f'ci_death_day5'] = row['ci_death'] == row['ci_d5'] if row['ci_death'] != '*' else '*'
row[f'ci_hypoxaemia_day10'] = row[f'ci_hypoxaemia_{phase10}']
row[f'ci_hypoxaemia_day20'] = row[f'ci_hypoxaemia_{phase20}']
row[f'ci_hypoxaemia_day5'] = row[f'ci_hypoxaemia_{phase5}']
row[f'ci_cardiac_output_day10'] = row[f'ci_cardiac_output_{phase10}']
row[f'ci_cardiac_output_day20'] = row[f'ci_cardiac_output_{phase20}']
row[f'ci_cardiac_output_day5'] = row[f'ci_cardiac_output_{phase5}']
row[f'ci_coagulation_day10'] = row[f'ci_coagulation_{phase10}']
row[f'ci_coagulation_day20'] = row[f'ci_coagulation_{phase20}']
row[f'ci_coagulation_day5'] = row[f'ci_coagulation_{phase5}']
row[f'ci_intravas_volume_day10'] = row[f'ci_intravas_volume_{phase10}']
row[f'ci_intravas_volume_day20'] = row[f'ci_intravas_volume_{phase20}']
row[f'ci_intravas_volume_day5'] = row[f'ci_intravas_volume_{phase5}']
row[f'ci_end_organ_perf_day10'] = row[f'ci_end_organ_perf_{phase10}']
row[f'ci_end_organ_perf_day20'] = row[f'ci_end_organ_perf_{phase20}']
row[f'ci_end_organ_perf_day5'] = row[f'ci_end_organ_perf_{phase5}']
row[f'ci_sys_immune_resp_day10'] = row[f'ci_sys_immune_resp_{phase10}']
row[f'ci_sys_immune_resp_day20'] = row[f'ci_sys_immune_resp_{phase20}']
row[f'ci_sys_immune_resp_day5'] = row[f'ci_sys_immune_resp_{phase5}']
row[f'ci_ventilator_day10'] = row[f'ci_ventilator_{phase10}']
row[f'ci_ventilator_day20'] = row[f'ci_ventilator_{phase20}']
row[f'ci_ventilator_day5'] = row[f'ci_ventilator_{phase5}']
row[f'ci_antiviral_treat_day10'] = row[f'ci_antiviral_treat_{phase10}']
row[f'ci_antiviral_treat_day20'] = row[f'ci_antiviral_treat_{phase20}']
row[f'ci_antiviral_treat_day5'] = row[f'ci_antiviral_treat_{phase5}']
row[f'ci_anticoag_treat_day10'] = row[f'ci_anticoag_treat_{phase10}']
row[f'ci_anticoag_treat_day20'] = row[f'ci_anticoag_treat_{phase20}']
row[f'ci_anticoag_treat_day5'] = row[f'ci_anticoag_treat_{phase5}']
row[f'ci_antiinflam_treat_day10'] = row[f'ci_antiinflam_treat_{phase10}']
row[f'ci_antiinflam_treat_day20'] = row[f'ci_antiinflam_treat_{phase20}']
row[f'ci_antiinflam_treat_day5'] = row[f'ci_antiinflam_treat_{phase5}']
# Clean data
......@@ -396,11 +408,19 @@ def doMakeBnSafe():
scripts = [
clean,
doMakeBnSafe,
'make_respiratory.py',
['validations.py', 'respiratory'],
['validations.py', 'progressionPrevious'],
['make_progression.py', 'nolatent'],
['validations.py', 'progressionNoLatent'],
['make_progression.py', 'latent'],
'make_nb.py',
'make_respiratory.py',
'validations.py',
['validations.py', 'progressionLatent'],
# 'make_nb.py',
# 'validations.py',
## Old/inactive
# 'make_structures.py',
......
import _env, csv, math, random, json, bni_netica, time, re, traceback
import _env, csv, math, random, json, bni_netica, time, re, traceback, sys
from bni_smile import Net
toRun = None
if len(sys.argv)>1:
toRun = sys.argv[1:]
def openNet(fn):
    """Load a Bayesian network file and return it as a bni_smile Net.

    fn: path to the network file (e.g. .dne/.xdsl) — passed straight
    through to Net; no format detection is done here (a commented-out
    re.search on '.dne' below suggests that was once planned).
    """
    return Net(fn)
# if re.search(r'\.dne', fn):
......@@ -138,6 +142,30 @@ def readData(csvFn, shuffle = False):
return data
def validateScenarios(outJsonFn, netFns, data, subsets, targets, targetPosStates):
	"""Validate every network in netFns against every evidence subset, saving results to JSON.

	outJsonFn: output filename (written under 'bns/') for the accumulated results.
	netFns: list of network file paths; each is opened once up front and reused.
	data: rows to validate against (as produced by readData — assumed, confirm with caller).
	subsets: dict mapping a subset name to the evidence-node subset passed to validate()
	         (a value of None appears to mean "use all evidence" — TODO confirm).
	targets / targetPosStates: target node names and their positive-state indices,
	         forwarded unchanged to validate().

	Side effects: prints progress and per-run results; rewrites
	'bns/<outJsonFn>' after each subset completes so partial results
	survive a crash part-way through.
	"""
	# Open each net once and reuse it across subsets (opening is expensive).
	cachedNets = {fn: openNet(fn) for fn in netFns}
	allResults = {}
	for subsetName, subset in subsets.items():
		for netFn in netFns:
			net = cachedNets[netFn]
			resultsKey = f'{netFn} - {subsetName}'
			print(resultsKey)
			try:
				res, targetDistros = validate(net, data, targets, targetPosStates, subset = subset)
				allResults[resultsKey] = {'res':res,'targetDistros':targetDistros}
				print(json.dumps(res, indent='\t'))
			# Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
			# still abort the run instead of being logged and swallowed.
			except Exception:
				print('Error, skipping')
				traceback.print_exc()
		# Update file after every subset
		with open(f'bns/{outJsonFn}', 'w') as resultsFile:
			resultsFile.write(json.dumps(allResults))
def doValidations():
data = readData('data/LEOSS_encoded_data_clean_bnsafe.csv')
netFns = [
......@@ -198,27 +226,9 @@ def doValidations():
'All': None,
}
cachedNets = {fn:openNet(fn) for fn in netFns}
if not toRun or 'nbtan' in toRun:
validateScenarios('results_nbtan.json', netFns, data, subsets, targets, targetPosStates)
allResults = {}
for subsetName, subset in subsets.items():
for netFn in netFns:
#net = Net(netFn)
net = cachedNets[netFn]
resultsKey = f'{netFn} - {subsetName}'
print(resultsKey)
try:
res, targetDistros = validate(net, data, targets, targetPosStates, subset = subset)
allResults[resultsKey] = {'res':res,'targetDistros':targetDistros}
print(json.dumps(res, indent='\t'))
except:
print('Error, skipping')
traceback.print_exc()
# Update file after every subset
with open('bns/results.json', 'w') as resultsFile:
resultsFile.write(json.dumps(allResults))
data = readData('data/respiratory.csv')
netFns = [
'bns/respiratory/resp2_obsStructure2.dne.trained.dne',
......@@ -235,23 +245,68 @@ def doValidations():
'All': None
}
#allResults = {}
for subsetName, subset in subsets.items():
for netFn in netFns:
net = bni_netica.Net(netFn)
resultsKey = f'{netFn} - {subsetName}'
print(resultsKey)
try:
res, targetDistros = validate(net, data, targets, targetPosStates, subset)
allResults[resultsKey] = {'res':res,'targetDistros':targetDistros}
print(json.dumps(res, indent='\t'))
except Exception as e:
print('Error, skipping', e)
traceback.print_exc()
if not toRun or 'respiratory' in toRun:
validateScenarios('results_respiratory.json', netFns, data, subsets, targets, targetPosStates)
background = ['ci_age_group_bg', 'ci_gender_bg', 'ci_metabolic_syndrome_bg', 'ci_smoking_history_bg', 'ci_diabetes_bg', 'ci_chronic_pul_disease_bg', 'ci_chronic_cardiac_disease_bg']
baseline = ['ci_antiviral_treat_bl', 'ci_sys_immune_resp_bl', 'ci_antiinflam_treat_bl', 'func_vas_bl', 'ci_coagulation_bl', 'ci_end_organ_perf_bl', 'func_pul_bl', 'ci_ventilator_bl', 'ci_hypoxaemia_bl', 'ci_intravas_volume_bl', 'func_car_bl', 'ci_anticoag_treat_bl', 'ci_cardiac_output_bl']
data = readData('data/LEOSS_encoded_data_clean_trim.csv')
netFns = [
'bns/progression/previous_progression.trained.xdsl',
'bns/progression/previous_progression_latent.trained.xdsl',
]
targets = ['ci_death_day5', 'ci_cardiac_output_day5','ci_hypoxaemia_day5','ci_sys_immune_resp_day5']
targetPosStates = [1, 0, 0, 2]
subsets = {
'background': background,
'background+baseline': background+baseline,
'All': None
}
if not toRun or 'progressionPrevious' in toRun:
validateScenarios('results_progression.json', netFns, data, subsets, targets, targetPosStates)
data = readData('data/LEOSS_encoded_data_clean_trim.csv')
netFns = [
'bns/progression/progression_day10.trained.xdsl',
]
targets = ['ci_death_day10', 'ci_cardiac_output_day10','ci_hypoxaemia_day10','ci_sys_immune_resp_day10']
targetPosStates = [1, 0, 0, 2]
subsets = {
'background': background,
'background+baseline': background+baseline,
'All': None
}
if not toRun or 'progressionNoLatent' in toRun:
validateScenarios('results_progressionNoLatent.json', netFns, data, subsets, targets, targetPosStates)
netFns = [
'bns/progression/progression_latent_day10.trained.xdsl',
]
if not toRun or 'progressionLatent' in toRun:
validateScenarios('results_progressionLatent.json', netFns, data, subsets, targets, targetPosStates)
# #allResults = {}
# for subsetName, subset in subsets.items():
# for netFn in netFns:
# net = bni_netica.Net(netFn)
# resultsKey = f'{netFn} - {subsetName}'
# print(resultsKey)
# try:
# res, targetDistros = validate(net, data, targets, targetPosStates, subset)
# allResults[resultsKey] = {'res':res,'targetDistros':targetDistros}
# print(json.dumps(res, indent='\t'))
# except Exception as e:
# print('Error, skipping', e)
# traceback.print_exc()
# Update file after every subset
with open('bns/results.json', 'w') as resultsFile:
resultsFile.write(json.dumps(allResults))
# # Update file after every subset
# with open('bns/results.json', 'w') as resultsFile:
# resultsFile.write(json.dumps(allResults))
doValidations()
\ No newline at end of file