import argparse
import logging
import multiprocessing
import os
import traceback
from pathlib import Path
from time import time

import nibabel as nib
import numpy as np
from nibabel.orientations import io_orientation, axcodes2ornt, ornt_transform
from scipy.ndimage import label as ndi_label, sum as ndi_sum, gaussian_filter


logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')


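# Per-method mapping of required label directories to value remaps
# (raw label value -> output value; values remapped to 0 are discarded).
# Each key doubles as the name of the corresponding subdirectory under the
# input root directory.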
input_labels_map = {
    "spine_to_vb": {
        "labels-spine": {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0,
                         8: 1, 9: 2, 10: 3, 11: 4, 12: 5, 13: 6, 14: 7,
                         15: 8, 16: 9, 17: 10, 18: 11, 19: 12, 20: 13,
                         21: 14, 22: 15, 23: 16, 24: 17, 25: 18,
                         26: 19, 27: 20, 28: 21},
        "labels-bodyregions": {11: 1},
        "labels-spinalcord": {1: 1, 79: 1},
    },
}


class ProcessLoader:
    def __init__(self, input_root, method):
        self.root = input_root
        self.method = method
        self.labels_map = input_labels_map[method]
        logging.info(f"Initializing method `{method}`, loading relevant label map")

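    # Derive a vertebral-body ("vb") mask from the spine segmentation:
    # crop away what is presumably the posterior elements using the
    # body-region / spinal-cord mask, then keep one connected component
    # per label, fill holes, and smooth the result.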
    def spine_to_vb(self, case):
        output_np = case.fetch_label('labels-spine')
        myelon_np = case.fetch_label('labels-bodyregions')
        myelon2_np = case.fetch_label('labels-spinalcord')

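        # Merge the spinal-cord mask into the body-region mask so both act
        # as a single reference for the posterior cut below.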
        myelon_np[myelon2_np > 0] = 1

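        # Remember label 19 (presumably the sacrum, original label 26) so it
        # can be restored after the posterior crop.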
        sacrum_np = output_np == 19

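        # Slice-wise along the third axis: find the most anterior voxel of the
        # reference mask and zero everything posterior to it, presumably to
        # strip the posterior vertebral elements and keep the vertebral bodies.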
        for slice_idx in range(myelon_np.shape[2]):
            slice_np = myelon_np[:, :, slice_idx]
            if np.sum(slice_np) > 0:
                com_max = np.max(np.where(slice_np > 0), axis=1)[1]
                output_np[:, :com_max, slice_idx] = 0

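        # Restore the sacrum, which the posterior crop may have clipped.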
        output_np[sacrum_np] = 19

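        # Keep only the largest connected component of each label to drop
        # small spurious fragments.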
        labels = np.unique(output_np)[1:]
        for label in labels:
            mask = output_np == label
            if np.any(mask):
                labeled, num_labels = ndi_label(mask)
                sizes = ndi_sum(mask, labeled, index=range(1, num_labels + 1))
                largest_label = np.argmax(sizes) + 1
                output_np[(labeled != largest_label) & mask] = 0

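        # Fill internal holes per label: invert the mask, keep only the largest
        # background component, and assign everything else back to the label.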
        for label in labels:
            mask = output_np == label
            if np.any(mask):
                np.invert(mask, out=mask)
                labeled, num_labels = ndi_label(mask)
                if num_labels <= 1:
                    continue
                sizes = ndi_sum(mask, labeled, index=range(1, num_labels + 1))
                largest_label = np.argmax(sizes) + 1
                mask[labeled != largest_label] = 0
                np.invert(mask, out=mask)
                output_np[mask] = label

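        # Smooth each label mask with a Gaussian filter and re-threshold;
        # later labels overwrite earlier ones where the smoothed masks overlap.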
        output_np_smoothed = np.zeros_like(output_np)
        for label in labels:
            mask = output_np == label
            if np.any(mask):
                smoothed_mask = gaussian_filter(mask.astype(float), sigma=1) > 0.5
                output_np_smoothed[smoothed_mask] = label

        return (output_np_smoothed, 'labels-vb')

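    # Entry point for the multiprocessing pool: load the case, run the selected
    # method, reorient the result back to the original affine, and save it.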
    def __call__(self, input_images_file):
        time_start = time()
        worker_name = multiprocessing.current_process().name
        logging.debug(f"Processing `{input_images_file}` @{worker_name}")

        try:
            case = CaseLoader(self.root, input_images_file, self.labels_map)

            # Dispatch to the processing method selected at construction time.
            output_np, output_dir = getattr(self, self.method)(case)

            if output_np is False or output_np.size == 0:
                raise ValueError("no output available, skipping")
            if not output_dir:
                raise RuntimeError("directory for output was undefined")

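            # Build the output NIfTI in canonical (RAS) space, then reorient it
            # back to the original image orientation before saving.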
            os.makedirs(self.root / output_dir, exist_ok=True)
            output_path = self.root / output_dir / input_images_file
            output_nib = nib.Nifti1Image(output_np, case.image_reoriented_affine)

            affine_transformer = ornt_transform(axcodes2ornt("RAS"),
                                                io_orientation(case.image_original_affine))
            output_nib = output_nib.as_reoriented(affine_transformer)
            if not np.allclose(case.image_original_affine, output_nib.affine):
                raise ValueError(f'Affine transformation failed: \n {case.image_original_affine} != \n {output_nib.affine}')

            nib.save(output_nib, output_path)
            logging.debug(f" saved `{output_path}` ({time()-time_start:.2f}s)")
            logging.info(f"{input_images_file} finished @{worker_name} ({time()-time_start:.2f}s)")

        except Exception as e:
            logging.warning(f"{input_images_file} failed:\n {e}\n {traceback.format_exc()}\n")


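# Loads one case: the input image plus the label volumes required by the
# selected method, reoriented to canonical (RAS) space and remapped.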
class CaseLoader:
    def __init__(self, input_root, input_images_file, input_labels_map):

        input_path = input_root / 'images' / input_images_file
        if not os.path.exists(input_path):
            raise ValueError(f"{input_path} not available")
        input_original_nib = nib.load(input_path)
        input_reoriented_nib = nib.as_closest_canonical(input_original_nib)
        input_reoriented_np = input_reoriented_nib.get_fdata().astype(np.float32)
        logging.debug(f" loaded input `{input_path}`")

        self.image_reoriented_np = input_reoriented_np
        self.image_reoriented_shape = input_reoriented_np.shape
        self.image_reoriented_affine = input_reoriented_nib.affine
        self.image_reoriented_zooms = input_reoriented_nib.header.get_zooms()
        self.image_original_affine = input_original_nib.affine
        self.input_images_file = input_images_file
        self.root = input_root
        self.labels_map = input_labels_map

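    # Load a label volume from `<root>/<label>/<file>`, check that it is
    # aligned with the input image, and remap its values via the label map.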
    def fetch_label(self, label):
        label_path = self.root / label / self.input_images_file
        label_original_nib = nib.load(label_path)
        label_reoriented_nib = nib.as_closest_canonical(label_original_nib)
        label_reoriented_np = label_reoriented_nib.get_fdata().astype(np.uint8)

        if not np.allclose(self.image_reoriented_affine, label_reoriented_nib.affine, rtol=1e-03, atol=1e-04):
            raise ValueError(f"affine matrices of input and label {label} do not match:"
                             f"\n{self.image_reoriented_affine}\n{label_reoriented_nib.affine}"
                             f"\n{self.image_reoriented_affine - label_reoriented_nib.affine}")
        if not np.array_equal(self.image_reoriented_shape, label_reoriented_np.shape):
            raise ValueError(f"shapes of input and label {label} do not match: "
                             f"{self.image_reoriented_shape} vs {label_reoriented_np.shape}")

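        # Remap label values via a lookup table; unmapped values fall to 0.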
        labels = self.labels_map[label]
        labels_max = max(max(labels.keys()), np.max(label_reoriented_np))
        relabel_array = np.zeros(labels_max + 1, dtype=np.uint8)
        for key, value in labels.items():
            relabel_array[key] = value

        label_reoriented_np = relabel_array[label_reoriented_np]

        logging.debug(f" loaded label `{label_path}` using {len(labels)} labels")
        return label_reoriented_np


def main(input_root, input_prefix, method):
    time_start = time()

    logging.info('PIPELINE: MASK PROCESSING')
    logging.info(f'input root directory: `{input_root}`')
    logging.info(f'method: `{method}`')
    logging.info(f'input prefix: `{input_prefix}`')

    if not os.path.exists(input_root):
        raise ValueError(f'Input root directory `{input_root}` does not exist.')
    if method not in input_labels_map.keys():
        raise ValueError(f"Method `{method}` not available.")

    input_images_dir = input_root / "images"
    input_images_files = [file.name for file in input_images_dir.glob(input_prefix + '*.nii.gz')]
    logging.info(f"{len(input_images_files)} input images identified")

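    # Determine which label volumes the selected method needs and keep only
    # the cases for which all of them are present.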
    input_labels_required = input_labels_map[method]
    logging.info(f"{len(input_labels_required)} labels required: {', '.join(input_labels_required.keys())}")

    input_images_files = [file for file in input_images_files
                          if all((input_root / label / file).exists() for label in input_labels_required.keys())]
    logging.info(f"{len(input_images_files)} complete cases (all required labels available) identified")

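    # Process all cases in parallel. Each worker handles one file at a time;
    # failures are caught inside ProcessLoader.__call__ and only logged, so a
    # single bad case does not abort the whole pipeline.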
    # Guard against an empty work list: Pool(processes=0) would raise.
    if not input_images_files:
        logging.warning("no complete cases found, nothing to do")
        return

    process = ProcessLoader(input_root, method)
    n_processes = min(multiprocessing.cpu_count(), len(input_images_files))

    logging.info(f"spawning {n_processes}/{multiprocessing.cpu_count()} worker processes\n")
    with multiprocessing.Pool(processes=n_processes, maxtasksperchild=20) as p:
        p.map(process, input_images_files, chunksize=min(5, n_processes))

    logging.info(f"FINISHED PIPELINE ({len(input_images_files)} cases in {time()-time_start:.2f}s)")


if __name__ == "__main__":
    """
    Toolkit for label manipulation, combination and aggregation.
    Requires an images/ directory and one directory per label specified in
    input_labels_map, all located within the input root directory.

    Usage:
        suppl/4_MaskEdits.py -i /Volumes/path/to/main/ -m spine_to_vb
    """

    parser = argparse.ArgumentParser(description="Toolkit to combine and manipulate masks.")
    parser.add_argument("-i", "--input", metavar="Input root directory", dest="input_root",
                        help="Root directory",
                        type=lambda p: Path(p).absolute(), required=True)
    parser.add_argument("-b", "--batch", metavar="Prefix of inputs", dest="input_prefix",
                        help="Prefix of input files to be processed",
                        type=str, required=False, default="")
    parser.add_argument("-m", "--method", metavar="Method", dest="method",
                        help="The method / pipeline used for mask processing",
                        type=str, choices=["spine_to_vb"], required=True)
    args = parser.parse_args()

    main(input_root=args.input_root, input_prefix=args.input_prefix, method=args.method)