Improving normalization by reducing memory allocations #1889

Merged: 1 commit, Jan 18, 2024
@@ -32,7 +32,7 @@ def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
         here seg is used to store the zero valued region. The value for that region in the segmentation is -1 by
         default.
         """
-        image = image.astype(self.target_dtype)
+        image = image.astype(self.target_dtype, copy=False)
         if self.use_mask_for_norm is not None and self.use_mask_for_norm:
             # negative values in the segmentation encode the 'outside' region (think zero values around the brain as
             # in BraTS). We want to run the normalization only in the brain region, so we need to mask the image.
@@ -45,7 +45,8 @@ def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
         else:
             mean = image.mean()
             std = image.std()
-            image = (image - mean) / (max(std, 1e-8))
+            image -= mean
+            image /= (max(std, 1e-8))
         return image
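The z-score change above is the core pattern of this PR: the augmented assignments image -= mean and image /= max(std, 1e-8) write the result back into the existing buffer, whereas image = (image - mean) / (max(std, 1e-8)) allocates at least one extra array the size of the image and rebinds the name to it. A minimal standalone sketch (not part of this diff; shape and dtype are purely illustrative) showing that the in-place form keeps operating on the same array object:

import numpy as np

image = np.random.rand(4, 64, 64, 64).astype(np.float32)
alias = image  # second reference to the same buffer

mean, std = image.mean(), image.std()

# In-place: subtraction and division reuse the existing buffer; no temporary
# the size of `image` is created, and every reference sees the result.
image -= mean
image /= max(std, 1e-8)

print(alias is image)                  # True: still the same ndarray object
print(np.shares_memory(alias, image))  # True: same underlying memory

One consequence worth keeping in mind: because the buffer is reused, the caller's input array is now modified in place rather than left untouched, which seems acceptable here since the normalizers hand the (possibly same) array back to the caller anyway.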


@@ -54,30 +55,32 @@ class CTNormalization(ImageNormalization):
 
     def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
         assert self.intensityproperties is not None, "CTNormalization requires intensity properties"
-        image = image.astype(self.target_dtype)
         mean_intensity = self.intensityproperties['mean']
         std_intensity = self.intensityproperties['std']
         lower_bound = self.intensityproperties['percentile_00_5']
         upper_bound = self.intensityproperties['percentile_99_5']
-        image = np.clip(image, lower_bound, upper_bound)
-        image = (image - mean_intensity) / max(std_intensity, 1e-8)
+
+        image = image.astype(self.target_dtype, copy=False)
+        np.clip(image, lower_bound, upper_bound, out=image)
+        image -= mean_intensity
+        image /= max(std_intensity, 1e-8)
         return image
 
 
 class NoNormalization(ImageNormalization):
     leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False
 
     def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
-        return image.astype(self.target_dtype)
+        return image.astype(self.target_dtype, copy=False)
 
 
 class RescaleTo01Normalization(ImageNormalization):
     leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False
 
     def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
-        image = image.astype(self.target_dtype)
-        image = image - image.min()
-        image = image / np.clip(image.max(), a_min=1e-8, a_max=None)
+        image = image.astype(self.target_dtype, copy=False)
+        image -= image.min()
+        image /= np.clip(image.max(), a_min=1e-8, a_max=None)
         return image
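The CTNormalization and RescaleTo01Normalization changes lean on two NumPy behaviours: ndarray.astype(dtype, copy=False) returns the input array itself when no dtype or layout conversion is actually needed, and np.clip(..., out=image) writes the clipped values into an existing array instead of allocating a new one. A small standalone sketch (not part of this diff; it assumes the image already arrives as float32, which is the case where copy=False saves an allocation):

import numpy as np

image = np.linspace(-1000.0, 2000.0, num=12, dtype=np.float32).reshape(3, 4)

# copy=False: when the dtype (and memory layout) already match, astype hands
# back the very same array instead of allocating a converted copy.
cast = image.astype(np.float32, copy=False)
print(cast is image)  # True here; would be False if a real conversion were needed

# out=image: the clipped result is stored directly in `image`, so the clip
# step adds no allocation of its own.
np.clip(image, -100.0, 300.0, out=image)
print(image.min(), image.max())  # both now lie within [-100, 300]

A related ordering detail: the cast to target_dtype has to happen before the in-place arithmetic, because an operation like image -= mean_intensity fails on an integer array when the scalar is a float (NumPy refuses the implicit down-cast under its default casting rules).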


@@ -89,7 +92,7 @@ def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
                                  "Your images do not seem to be RGB images"
         assert image.max() <= 255, "RGB images are uint 8, for whatever reason I found pixel values greater than 255" \
                                    ". Your images do not seem to be RGB images"
-        image = image.astype(self.target_dtype)
-        image = image / 255.
+        image = image.astype(self.target_dtype, copy=False)
+        image /= 255.
         return image
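Not part of this diff, but one quick way to sanity-check the effect of the rewrite is to compare traced peak allocations of an out-of-place and an in-place variant with tracemalloc (recent NumPy versions report their buffer allocations to tracemalloc). The sketch below mirrors the CT-style clip/subtract/divide pattern with made-up constants; exact numbers depend on NumPy version, array size, and machine:

import numpy as np
import tracemalloc

LOWER, UPPER = -100.0, 300.0   # illustrative clip bounds
MEAN, STD = 50.0, 60.0         # illustrative intensity statistics

def normalize_copy(image: np.ndarray) -> np.ndarray:
    # out-of-place variant: clip and arithmetic each produce new arrays
    image = np.clip(image, LOWER, UPPER)
    return (image - MEAN) / max(STD, 1e-8)

def normalize_inplace(image: np.ndarray) -> np.ndarray:
    # in-place variant following the pattern of this PR
    np.clip(image, LOWER, UPPER, out=image)
    image -= MEAN
    image /= max(STD, 1e-8)
    return image

for fn in (normalize_copy, normalize_inplace):
    image = np.ones((64, 256, 256), dtype=np.float32)  # ~16 MiB, allocated before tracing
    tracemalloc.start()
    fn(image)
    _, peak = tracemalloc.get_traced_memory()
    tracemalloc.stop()
    print(fn.__name__, f"peak traced allocations: {peak / 2**20:.1f} MiB")

The in-place variant should report a peak close to zero, while the out-of-place one should report roughly the image size or a small multiple of it.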
