a .txt file is now allowed as input
Malte Jensen committed May 7, 2024
1 parent b5240ab commit 6a48ea2
Showing 9 changed files with 367 additions and 116 deletions.
14 changes: 7 additions & 7 deletions bin/C2C-slurm
@@ -22,25 +22,25 @@ def python_submit(command, node=None):

     try:
         if node is None:
-            command = "sbatch --ntasks=1 --cpus-per-task=1 --output ./slurm/slurm-%j.out \
-                --mem-per-cpu=8G -p gpu --gpus 1 --time=1:00:00 slurm.sh"
+            command = "sbatch --ntasks=1 --cpus-per-task=8 --output ./slurm/slurm-%j.out \
+                --mem-per-cpu=3G -p gpu --gpus 1 --time=1:00:00 slurm.sh"
             submit_command(command)
             print(f'Submitted the command --- "{command}" --- to slurm.')
         else:
-            command = f"sbatch --ntasks=1 --cpus-per-task=1 --output ./slurm/slurm-%j.out \
-                --nodelist={node} --mem-per-cpu=8G -p gpu --gpus 1 --time=1:00:00 slurm.sh"
+            command = f"sbatch --ntasks=1 --cpus-per-task=8 --output ./slurm/slurm-%j.out \
+                --nodelist={node} --mem-per-cpu=3G -p gpu --gpus 1 --time=1:00:00 slurm.sh"
             submit_command(command)
             print(f'Submitted the command --- "{command}" --- to slurm.')
     except subprocess.CalledProcessError:
         if node == None:
-            command = f"sbatch -c 8 --gres=gpu:1 --output ./slurm/slurm-%j.out --mem=128000 --time=100-00:00:00 slurm.sh "
+            command = f"sbatch -c 8 --gres=gpu:1 --output ./slurm/slurm-%j.out --mem=50gb --time=100-00:00:00 slurm.sh "
            submit_command(command)
             print(f'Submitted the command --- "{command}" --- to slurm.')
         else:
-            command = f"sbatch -c 8 --gres=gpu:1 --output ./slurm/slurm-%j.out --nodelist={node} --mem=128000 --time=100-00:00:00 slurm.sh"
+            command = f"sbatch -c 8 --gres=gpu:1 --output ./slurm/slurm-%j.out --nodelist={node} --mem=50gb --time=100-00:00:00 slurm.sh"
             submit_command(command)
             print(f'Submitted the command --- "{command}" --- to slurm.')
     os.remove("./slurm.sh")


-python_submit(command)
+python_submit(command, node='siena')
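
Note: the flag change trades per-CPU memory for core count rather than simply lowering the request. A quick sanity check of the totals each variant asks for (Slurm allocates --mem-per-cpu for every CPU in --cpus-per-task):

import warnings  # stdlib only; this is just arithmetic for illustration

# Memory implied by the old and new sbatch flags
old_total_gb = 1 * 8  # --cpus-per-task=1, --mem-per-cpu=8G ->  8 GB, 1 CPU
new_total_gb = 8 * 3  # --cpus-per-task=8, --mem-per-cpu=3G -> 24 GB, 8 CPUs
print(f"old: {old_total_gb} GB total, new: {new_total_gb} GB total")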
12 changes: 11 additions & 1 deletion comp2comp/aortic_calcium/aortic_calcium.py
@@ -523,6 +523,10 @@ def __call__(self, inference_pipeline):
         return {}

     def CalculateAgatsonScore(self, calc_mask_region, ct, pix_dims):
+        '''
+        The original Agatston paper requires lesions to be >= 1 mm^2;
+        other papers use at least 3 face-linked pixels.
+        '''
         def get_hu_factor(max_hu):
             # if max_hu ><
             if max_hu < 200:
@@ -548,6 +552,12 @@ def get_hu_factor(max_hu):

         for j in range(1, num_lesions + 1):
             tmp_mask = labelled_calc == j
-            agatson += tmp_mask.sum() * area_per_pixel * get_hu_factor(tmp_ct_slice[tmp_mask].max())
+
+            tmp_area = tmp_mask.sum() * area_per_pixel
+            # exclude lesions of 1 mm^2 or smaller
+            if tmp_area <= 1:
+                continue
+            else:
+                agatson += tmp_area * get_hu_factor(tmp_ct_slice[tmp_mask].max())

         return agatson
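
For context, a minimal self-contained sketch of the per-slice logic this hunk changes. The HU weighting follows the standard Agatston convention (130-199 HU -> 1, 200-299 -> 2, 300-399 -> 3, >= 400 -> 4); the array values and pixel spacing are invented for illustration, and lesions are labelled with scipy's default face connectivity:

import numpy as np
from scipy import ndimage

def hu_factor(max_hu):
    # standard Agatston density weighting by peak HU
    if max_hu < 200:
        return 1
    elif max_hu < 300:
        return 2
    elif max_hu < 400:
        return 3
    return 4

ct_slice = np.array([[0, 250, 260],
                     [0,   0,   0],
                     [0,   0, 450]])   # HU values (made up)
calc_mask = ct_slice >= 130            # calcium threshold
area_per_pixel = 1.0 * 1.0             # mm^2 per pixel (made up spacing)

labelled, num_lesions = ndimage.label(calc_mask)  # face-linked components
agatston = 0.0
for j in range(1, num_lesions + 1):
    lesion = labelled == j
    area = lesion.sum() * area_per_pixel
    if area <= 1:                      # drop lesions of 1 mm^2 or less, as in the diff
        continue
    agatston += area * hu_factor(ct_slice[lesion].max())

print(agatston)  # 4.0: the two-pixel lesion scores 2 mm^2 * factor 2; the lone 450 HU pixel is excluded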
2 changes: 1 addition & 1 deletion comp2comp/inference_pipeline.py
@@ -32,7 +32,7 @@ def __call__(self, inference_pipeline=None, **kwargs):
             print(f"({i + 1}) {inference_class.__repr__()}")
         print("")

-        print("Starting inference pipeline.\n")
+        print("Starting inference pipeline for:\n")

         if inference_pipeline:
             for key, value in kwargs.items():
3 changes: 3 additions & 0 deletions comp2comp/io/io.py
@@ -10,6 +10,7 @@
 import dosma as dm
 import pydicom
 import SimpleITK as sitk
+import nibabel as nib

 from comp2comp.inference_class_base import InferenceClass

@@ -113,6 +114,8 @@ def __call__(self, inference_pipeline):
             os.path.join(segmentations_output_dir, "converted_dcm.nii.gz"),
         )

+        inference_pipeline.medical_volume = nib.load(os.path.join(segmentations_output_dir, "converted_dcm.nii.gz"))
+
         return {}
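
With the converted volume now attached to the pipeline, downstream classes (such as the visualizer further down) can inspect or re-save it directly. A small sketch, assuming the output layout used above:

import nibabel as nib

# hypothetical inspection of the volume the pipeline now carries
vol = nib.load("outputs/segmentations/converted_dcm.nii.gz")
print(vol.shape, vol.header.get_zooms())  # dimensions and voxel spacing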
24 changes: 13 additions & 11 deletions comp2comp/io/io_utils.py
@@ -47,18 +47,20 @@ def get_dicom_or_nifti_paths_and_num(path):
     Returns:
         list: List of paths.
     """
-    if path.endswith(".nii") or path.endswith(".nii.gz"):
-        return [(path, 1)]
     dicom_nifti_paths = []
-    for root, dirs, files in os.walk(path):
-        if len(files) > 0:
-            # if all(file.endswith(".dcm") or file.endswith(".dicom") for file in files):
-            dicom_nifti_paths.append((root, len(files)))
-            # else:
-            #     for file in files:
-            #         if file.endswith(".nii") or file.endswith(".nii.gz"):
-            #             num_slices = 450
-            #             dicom_nifti_paths.append((os.path.join(root, file), num_slices))

+    if path.endswith(".nii") or path.endswith(".nii.gz"):
+        dicom_nifti_paths.append((path, 1))
+    elif path.endswith(".txt"):
+        with open(path, "r") as f:
+            for dicom_folder_path in f:
+                dicom_folder_path = dicom_folder_path.strip()
+                dicom_nifti_paths.append((dicom_folder_path, len(os.listdir(dicom_folder_path))))
+    else:
+        for root, dirs, files in os.walk(path):
+            if len(files) > 0:
+                # if all(file.endswith(".dcm") or file.endswith(".dicom") for file in files):
+                dicom_nifti_paths.append((root, len(files)))

     return dicom_nifti_paths
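
This is the headline change of the commit: a plain-text file listing one DICOM folder per line is now accepted as input. A usage sketch, with hypothetical folder paths:

# paths.txt (one DICOM series folder per line):
#   /data/study1/series_0001
#   /data/study2/series_0042

from comp2comp.io.io_utils import get_dicom_or_nifti_paths_and_num

# yields one (folder, slice_count) tuple per listed folder, where
# slice_count is simply the number of files in that folder
for folder, num_slices in get_dicom_or_nifti_paths_and_num("paths.txt"):
    print(folder, num_slices)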
120 changes: 79 additions & 41 deletions comp2comp/liver_spleen_pancreas/liver_spleen_pancreas.py
@@ -8,7 +8,7 @@
     nostdout,
     setup_nnunet,
 )
-
+from totalsegmentatorv2.python_api import totalsegmentator
 from comp2comp.inference_class_base import InferenceClass

@@ -25,18 +25,17 @@ def __call__(self, inference_pipeline):
         self.output_dir_segmentations = os.path.join(self.output_dir, "segmentations/")
         if not os.path.exists(self.output_dir_segmentations):
             os.makedirs(self.output_dir_segmentations)

         self.model_dir = inference_pipeline.model_dir

-        mv, seg = self.organ_seg(
+        seg = self.organ_seg(
             os.path.join(self.output_dir_segmentations, "converted_dcm.nii.gz"),
             self.output_dir_segmentations + "organs.nii.gz",
             inference_pipeline.model_dir,
         )

         inference_pipeline.segmentation = seg
-        inference_pipeline.medical_volume = mv

         return {}

     def organ_seg(
@@ -53,43 +52,82 @@ def organ_seg(
         st = time()
         os.environ["SCRATCH"] = self.model_dir

-        # Setup nnunet
-        model = "3d_fullres"
-        folds = [0]
-        trainer = "nnUNetTrainerV2_ep4000_nomirror"
-        crop_path = None
-        task_id = [251]
-
-        setup_nnunet()
-        download_pretrained_weights(task_id[0])
-
-        from totalsegmentator.nnunet import nnUNet_predict_image
-
-        with nostdout():
-            seg, mvs = nnUNet_predict_image(
-                input_path,
-                output_path,
-                task_id,
-                model=model,
-                folds=folds,
-                trainer=trainer,
-                tta=False,
-                multilabel_image=True,
-                resample=1.5,
-                crop=None,
-                crop_path=crop_path,
-                task_name="total",
-                nora_tag="None",
-                preview=False,
-                nr_threads_resampling=1,
-                nr_threads_saving=6,
-                quiet=False,
-                verbose=True,
-                test=0,
-            )
+        seg = totalsegmentator(
+            input=input_path,
+            output=output_path,
+            # input = os.path.join(self.output_dir_segmentations, "converted_dcm.nii.gz"),
+            # output = os.path.join(self.output_dir_segmentations, "segmentation.nii"),
+            task_ids=[291],
+            ml=True,
+            nr_thr_resamp=1,
+            nr_thr_saving=6,
+            fast=False,
+            nora_tag="None",
+            preview=False,
+            task="total",
+            # roi_subset = [
+            #     "vertebrae_T12",
+            #     "vertebrae_L1",
+            #     "vertebrae_L2",
+            #     "vertebrae_L3",
+            #     "vertebrae_L4",
+            #     "vertebrae_L5",
+            # ],
+            roi_subset=None,
+            statistics=False,
+            radiomics=False,
+            crop_path=None,
+            body_seg=False,
+            force_split=False,
+            output_type="nifti",
+            quiet=False,
+            verbose=False,
+            test=0,
+            skip_saving=True,
+            device="gpu",
+            license_number=None,
+            statistics_exclude_masks_at_border=True,
+            no_derived_masks=False,
+            v1_order=False,
+        )
+
+        # Setup nnunet
+        # model = "3d_fullres"
+        # folds = [0]
+        # trainer = "nnUNetTrainerV2_ep4000_nomirror"
+        # crop_path = None
+        # task_id = [251]

+        # setup_nnunet()
+        # download_pretrained_weights(task_id[0])

+        # from totalsegmentator.nnunet import nnUNet_predict_image

+        # with nostdout():
+        #     seg, mvs = nnUNet_predict_image(
+        #         input_path,
+        #         output_path,
+        #         task_id,
+        #         model=model,
+        #         folds=folds,
+        #         trainer=trainer,
+        #         tta=False,
+        #         multilabel_image=True,
+        #         resample=1.5,
+        #         crop=None,
+        #         crop_path=crop_path,
+        #         task_name="total",
+        #         nora_tag="None",
+        #         preview=False,
+        #         nr_threads_resampling=1,
+        #         nr_threads_saving=6,
+        #         quiet=False,
+        #         verbose=True,
+        #         test=0,
+        #     )
         end = time()

         # Log total time for spine segmentation
         print(f"Total time for organ segmentation: {end-st:.2f}s.")

-        return seg, mvs
+        return seg
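
Organ segmentation now goes through the TotalSegmentator Python API instead of the hand-rolled nnU-Net call. A trimmed sketch of the new call path, keeping only the arguments above that matter for this pipeline (assumes the totalsegmentatorv2 package is installed and a GPU is available; argument names mirror the diff):

from totalsegmentatorv2.python_api import totalsegmentator

seg = totalsegmentator(
    input="converted_dcm.nii.gz",  # CT volume to segment
    output="organs.nii.gz",        # output path (not written when skip_saving=True)
    task_ids=[291],                # task id used by the commit
    ml=True,                       # one multilabel image rather than per-organ files
    skip_saving=True,              # return the segmentation instead of writing it
    device="gpu",
)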
@@ -10,7 +10,8 @@
     generate_liver_spleen_pancreas_report,
     generate_slice_images,
 )
-
+import nibabel as nib
+import numpy as np

 class LiverSpleenPancreasVisualizer(InferenceClass):
     def __init__(self):
@@ -22,19 +23,40 @@ def __init__(self):
             "Median": "HU",
         }

-        self.class_nums = [1, 5, 10]
+        # self.class_nums = [1, 5, 10]
+        self.class_nums = [5, 1, 7]
         self.organ_names = ["liver", "spleen", "pancreas"]

     def __call__(self, inference_pipeline):
         self.output_dir = inference_pipeline.output_dir
         self.output_dir_images_organs = os.path.join(self.output_dir, "images/")
-        inference_pipeline.output_dir_images_organs_organs_organs = (
+        inference_pipeline.output_dir_images_organs = (
             self.output_dir_images_organs
         )

         if not os.path.exists(self.output_dir_images_organs):
             os.makedirs(self.output_dir_images_organs)

+        # make folder for volumes
+        self.output_dir_volumes = os.path.join(self.output_dir, "volumes/")
+        if not os.path.exists(self.output_dir_volumes):
+            os.makedirs(self.output_dir_volumes)
+
+        # save the volume to disk in nifti format
+        nib.save(inference_pipeline.medical_volume, os.path.join(self.output_dir_volumes, "ct.nii.gz"))
+
+        segmentation_subset = np.zeros(inference_pipeline.medical_volume.shape, dtype=np.int8)
+        tmp_seg = inference_pipeline.segmentation.get_fdata().astype(np.int8)
+
+        for i, c in enumerate(self.class_nums, start=1):
+            segmentation_subset[tmp_seg == c] = i
+
+        inference_pipeline.saveArrToNifti(
+            segmentation_subset,
+            os.path.join(self.output_dir_volumes, "liver_spleen_pancreas_mask.nii.gz"),
+        )
+
         inference_pipeline.medical_volume_arr = np.flip(
             inference_pipeline.medical_volume.get_fdata(), axis=1
         )
@@ -61,7 +83,7 @@ def __call__(self, inference_pipeline):
         )

         inference_pipeline.organ_metrics = self.organ_metrics

         generate_liver_spleen_pancreas_report(
             self.output_dir_images_organs, self.organ_names
         )
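
A note on the remap: the new class_nums = [5, 1, 7] appear to follow the TotalSegmentator v2 "total" label map (where, as far as I recall, spleen is 1, liver 5, and pancreas 7), so the saved liver_spleen_pancreas_mask.nii.gz uses compact labels 1/2/3 in the order of organ_names: 1 = liver, 2 = spleen, 3 = pancreas.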