In [1]:
# NOTE(review): %pylab is deprecated; it star-imports numpy and matplotlib into
# the notebook namespace (np, plt, etc. come from here, not from explicit imports).
%pylab inline
import sys
import os.path as op
from pathlib import Path
import shutil
# sys.path.insert(0, "/home/mjirik/projects/pyseg_base/")
# Make the local development checkout importable ahead of installed packages.
sys.path.insert(0, op.abspath("../"))
import scipy
import time
import pandas as pd
from imcut import pycut
import sed3
import itertools
import data2tex as dtt
import io3d
# Directory where LaTeX snippets (numbers/tables) for the paper are written.
latex_dir = Path("../../papers/cmbbeiv19/tmp/")
dtt.set_output(latex_dir)
# sh 155,160, r10, dpoff 3, seeds 3
# Patient ids of the 3Dircadb1 dataset used in this experiment.
# dp_ircad_id = [1, 11]
# dp_ircad_id = [1, 5, 6, 7]
dp_ircad_id = [1, 5, 6, 7, 11, 20]
# dp_ircad_id = [11, 20]
# dp_ircad_id = [1]
# Organ seed keys to segment (see saved_seeds in the seeds pklz files).
dp_keys = ["left_kidney"]
# Working voxel size [mm] used for resampling; None would keep original resolution.
# NOTE(review): this None assignment is immediately overwritten two lines below.
working_voxelsize_mm = None
# working_voxelsize_mm = [1.5, 1.5, 1.5]
working_voxelsize_mm = [1.3, 1.3, 1.3]
# working_voxelsize_mm = [1.7, 1.7, 1.7]
# working_voxelsize_mm = "orig*2"
# working_voxelsize_mm=[2, 2, 2]
# working_voxelsize_mm=[2.2, 2.5, 2.5]
# Output CSV file names for experiment results (old and new naming).
fname = "exp062-multiscale_delme.csv"
fnamenew = "msgc_experiment_ct.csv"
# Random seed and figure resolution for reproducible outputs.
rnd_seed=1
dpi = 400
In [ ]:
In [2]:
# dry_run = True
# Fast development mode: when True, run_gc_with_defined_setup skips the real
# graph-cut and substitutes a cheap thresholding with placeholder statistics.
dry_run = False
# When True, recompute experiments even if results already exist.
force_rewrite = False
# force_rewrite = True
In [4]:
# block size was 10  (translated from Czech: "block size bylo 10")
# Parameters of the three compared imcut/pycut methods.
# Baseline: single-scale graph-cut.
segparams0 = {
'method':'graphcut',
# 'method':'multiscale_graphcut',
'use_boundary_penalties': True,
'boundary_dilatation_distance': 2,
'boundary_penalties_weight': 1,
'block_size': 10,
'tile_zoom_constant': 1
}
# Multiscale graph-cut, high-to-low resolution variant.
segparams1 = {
# 'method':'graphcut',
'method':'multiscale_graphcut_hi2lo',
'use_boundary_penalties': True,
'boundary_dilatation_distance': 2,
'boundary_penalties_weight': 1,
'block_size': 10,
'tile_zoom_constant': 1
}
# Multiscale graph-cut, low-to-high resolution variant.
segparams2 = {
# 'method':'graphcut',
'method':'multiscale_graphcut_lo2hi',
'use_boundary_penalties': True,
'boundary_dilatation_distance': 2,
'boundary_penalties_weight': 1,
'block_size': 10,
'tile_zoom_constant': 1
}
# Human-readable method labels; note the trailing spaces are intentional
# (presumably used verbatim as key/column prefixes — confirm against callers).
labels = [
"ssgc ",
"msgc_hi2lo ",
"msgc_lo2hi ",
]
In [5]:
# Location of the manually drawn seeds for the IRCAD dataset.
data_seeds_path = Path(io3d.datasets.join_path("medical", "orig", "ircad1b_seeds", get_root=True))
d01_pth = data_seeds_path / "ircadb1-01.pklz"
# Quick sanity check that the first seeds file loads; only the last bare
# expression (datap.keys()) is rendered by the notebook.
datap = io3d.read(d01_pth)
datap
str(d01_pth)
datap.keys()
Out[5]:
In [6]:
# io3d.write(datap, data_seeds_path / "ircad1b01.hdf5")
# io3d.read(data_seeds_path / "ircad1b01.hdf5")
In [7]:
# datap['saved_seeds']["left_kidney"]
In [8]:
# pth_data3d = Path(io3d.datasets.join_path("medical", "orig", "3Dircadb1.{}", "PATIENT_DICOM", get_root=True))
# pth_ground_true = Path(io3d.datasets.join_path("medical", "orig", "3Dircadb1.{}", "MASKS_DICOM", "{}" get_root=True))
# pth_seeds = Path(io3d.datasets.join_path("medical", "orig", "ircad1b_seeds", "ircad1b{:02d}.pklz", get_root=True))
# print(pth_data3d)
# print(pth_seeds)
In [9]:
# import imma
# help(imma.image_manipulation.resize_to_mm)
In [10]:
def prepare_data(i, seeds_key):
    """Load CT data, ground-truth mask and interactive seeds for one
    3Dircadb1 patient and optionally resample all three to the working
    voxel size.

    :param i: patient number in the 3Dircadb1 dataset (e.g. 1, 5, 20)
    :param seeds_key: organ key in the saved seeds file, e.g. "left_kidney"
    :return: (data3d, seg_true, seeds, used_voxelsize_mm, orig_voxelsize_mm)
    """
    # BUG FIX: `imma` was referenced below but never imported anywhere in the
    # notebook (only a commented-out import existed), causing a NameError.
    import imma.image_manipulation

    # Ground-truth directory names drop the underscore, e.g. "left_kidney" -> "leftkidney".
    ground_true_key = seeds_key.replace("_", "")
    pth_data3d = Path(io3d.datasets.join_path("medical", "orig", "3Dircadb1.{}", "PATIENT_DICOM", get_root=True))
    pth_ground_true = Path(io3d.datasets.join_path("medical", "orig", "3Dircadb1.{}", "MASKS_DICOM", "{}", get_root=True))
    pth_seeds = Path(io3d.datasets.join_path("medical", "orig", "ircad1b_seeds", "ircadb1-{:02d}.pklz", get_root=True))
    datap_data3d = io3d.read(str(pth_data3d).format(i))
    datap_seeds = io3d.read(str(pth_seeds).format(i))
    datap_ground_true = io3d.read(str(pth_ground_true).format(i, ground_true_key))
    seeds = datap_seeds["saved_seeds"][seeds_key]
    data3d = datap_data3d["data3d"]
    seg_true = datap_ground_true["data3d"]
    vs = datap_data3d["voxelsize_mm"]
    # BUG FIX: `wvs` was only assigned inside the `if` branch, so the return
    # statement raised UnboundLocalError when working_voxelsize_mm was None.
    # Default to the original voxel size (no resampling).
    wvs = vs
    if working_voxelsize_mm is not None:
        if working_voxelsize_mm == "orig*2":
            wvs = np.asarray(vs) * 2
        else:
            wvs = working_voxelsize_mm
        data3d = imma.image_manipulation.resize_to_mm(data3d, vs, wvs)
        # order=0 (nearest neighbour) keeps label images binary/integer-valued.
        seg_true = imma.image_manipulation.resize_to_mm(seg_true, vs, wvs, order=0)
        seeds = imma.image_manipulation.resize_to_mm(seeds, vs, wvs, order=0)
    return data3d, seg_true, seeds, wvs, vs
In [18]:
def to_latex_file(df, fn):
    """Render ``df`` via its ``to_latex()`` method and write the result to ``fn``."""
    Path(fn).write_text(df.to_latex())
def latex_float(f, precision=4):
    """Format a float for LaTeX with ``precision`` significant digits.

    Values that python renders in scientific notation are converted to the
    ``m \\times 10^{e}`` form; everything else is returned as-is.
    """
    rendered = ("{0:." + str(int(precision)) + "g}").format(f)
    if "e" not in rendered:
        return rendered
    mantissa, exponent = rendered.split("e")
    return r"{0} \times 10^{{{1}}}".format(mantissa, int(exponent))
def float_to_latex_file(fl, fn, precision=4):
    """Format ``fl`` with latex_float() and write the string to file ``fn``."""
    with open(fn, "w") as out:
        out.write(latex_float(fl, precision=precision))
def num2latex(num, filename=None, precision=4):
    """Wrap a number (or preformatted string) in a siunitx ``\\num{...}`` command.

    Strings are used verbatim; numbers are rendered with ``precision``
    significant digits. Values already starting with ``\\num`` are not
    wrapped again. When ``filename`` is given, the result is also written
    to that file. Returns the resulting LaTeX string.
    """
    if type(num) is str:
        rendered = num
    else:
        template = "{0:." + str(int(precision)) + "g}"
        rendered = template.format(num)
    if not rendered.startswith(r"\num"):
        rendered = "\\num{" + rendered + "}"
    if filename is not None:
        with open(filename, "w") as out:
            out.write(rendered)
    return rendered
def to_file(text, fn):
    """Write ``text`` to file ``fn``, overwriting any existing content."""
    Path(fn).write_text(text)
In [19]:
def process_gc_stats(stats1, prefix=None):
    """Post-process raw graph-cut statistics into flat, prefixed counters.

    Every key of ``stats1`` is copied with ``prefix`` prepended. The
    "nlinks shape"/"tlinks shape" entries (lists of per-block link-array
    shapes) are replaced by total link counts, and their sum is added as
    "<prefix>edge number". Returns a new dict; ``stats1`` is not modified.
    """
    if prefix is None:
        prefix = ""
    processed = {prefix + key: value for key, value in stats1.items()}
    for link in ("nlinks", "tlinks"):
        # First column of the summed shape array is the number of links.
        shapes = processed.pop(prefix + link + " shape")
        processed[prefix + link + " number"] = np.sum(np.asarray(shapes), axis=0)[0]
    processed[prefix + "edge number"] = (
        processed[prefix + "nlinks number"] + processed[prefix + "tlinks number"]
    )
    return processed
def merge_stats(stats0, stats1, stats2, labels=None):
    """Combine the statistics of three graph-cut runs into one flat dict.

    Each input dict is post-processed by process_gc_stats() with the
    corresponding entry of ``labels`` used as key prefix (empty prefixes
    by default).
    """
    if labels is None:
        labels = [""] * 3
    combined = {}
    for raw_stats, prefix in zip((stats0, stats1, stats2), labels):
        combined.update(process_gc_stats(raw_stats, prefix))
    return combined
def run_gc_with_defined_setup(img, segparams, seeds, true_seg, dry_run=False):
    """Run one graph-cut segmentation and collect its statistics.

    Returns the solver's stats dict extended with "time" (wall-clock
    seconds), "error" (voxel disagreement with ``true_seg``) and
    "data segmentation size px". In dry-run mode a cheap mid-range
    thresholding stands in for the solver and placeholder link-shape
    statistics are used.
    """
    t0 = time.time()
    graphcut = pycut.ImageGraphCut(img, segparams=segparams)
    graphcut.set_seeds(seeds)
    if not dry_run:
        graphcut.run()
        segmentation = graphcut.segmentation
        stats = graphcut.stats
    else:
        threshold = np.mean([np.min(img), np.max(img)])
        segmentation = (img < threshold).astype(np.uint8)
        stats = {"nlinks shape": [[5, 5]], "tlinks shape": [[5, 5]]}
    elapsed = time.time() - t0
    # NOTE(review): compares ground truth against the inverted segmentation
    # (1 - seg); presumably the solver labels the target organ as 0 — confirm.
    error = np.sum(np.abs(true_seg - (1 - segmentation)))
    stats["time"] = elapsed
    stats["error"] = error
    stats["data segmentation size px"] = np.sum(segmentation > 0)
    return stats
def add_data_and_algoritm_info(stats, data_params_dict, segparams, start, true_segmentation, voxelsize_mm, orig_vs_mm):
    """Attach dataset, resolution and algorithm metadata to a stats dict.

    Mutates and returns ``stats``. Reads the module-level
    ``machine_hostname`` variable, which is defined elsewhere in the
    notebook. The "voxesize" spelling is kept on purpose — it is the
    column name already used in the results CSV.
    """
    stats.update(data_params_dict)
    for axis in range(3):
        stats["data size {}".format(axis)] = true_segmentation.shape[axis]
    stats["data size px"] = np.prod(true_segmentation.shape)
    stats["data target size px"] = np.sum(true_segmentation > 0)
    stats["data voxesize mm^3"] = np.prod(voxelsize_mm)
    for axis in range(3):
        stats["data voxesize mm {}".format(axis)] = voxelsize_mm[axis]
    for axis in range(3):
        stats["data orig voxesize mm {}".format(axis)] = orig_vs_mm[axis]
    stats["block size"] = segparams["block_size"]
    stats['machine hostname'] = machine_hostname
    stats['experiment iteration start time'] = start
    return stats
def add_data_seaborn(stats, data_params_dict, segparams, start, i, label, true_segmentation, voxelsize_mm, orig_vs_mm):
    """Build a one-row DataFrame with all statistics of one method run.

    The raw stats are flattened by process_gc_stats(), enriched with
    dataset/algorithm metadata, and ``label`` becomes the "method" column.
    The row index is ``i * 3`` (three methods per experiment iteration).
    """
    flat_stats = process_gc_stats(stats, "")
    flat_stats = add_data_and_algoritm_info(flat_stats, data_params_dict, segparams, start, true_segmentation, voxelsize_mm, orig_vs_mm)
    flat_stats["method"] = label
    return pd.DataFrame(flat_stats, index=[i * 3])