From ecf30b90b47b1ab9842ffec0900d0c8d121edc70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sophia=20M=C3=A4dler?= <15019107+sophiamaedler@users.noreply.github.com> Date: Tue, 4 Jun 2024 13:59:21 +0200 Subject: [PATCH 1/2] add function to get downsampling parameters --- src/sparcscore/pipeline/workflows.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/sparcscore/pipeline/workflows.py b/src/sparcscore/pipeline/workflows.py index b61b28d0..665c2130 100644 --- a/src/sparcscore/pipeline/workflows.py +++ b/src/sparcscore/pipeline/workflows.py @@ -1057,6 +1057,7 @@ def _finalize_segmentation_results(self, size_padding): channels = np.stack(required_maps).astype(np.uint16) _seg_size = self.maps["nucleus_segmentation"].shape + self.log( f"Segmentation size after downsampling before resize to original dimensions: {_seg_size}" ) @@ -1064,28 +1065,24 @@ def _finalize_segmentation_results(self, size_padding): # rescale downsampled segmentation results to original size by repeating pixels _, x, y = size_padding + N, smoothing_kernel_size = _get_downsampling_parameters() + nuc_seg = self.maps["nucleus_segmentation"] - nuc_seg = nuc_seg.repeat(self.config["downsampling_factor"], axis=0).repeat( - self.config["downsampling_factor"], axis=1 - ) + nuc_seg = nuc_seg.repeat(N, axis=0).repeat(N, axis=1) cyto_seg = self.maps["cytosol_segmentation"] - cyto_seg = cyto_seg.repeat(self.config["downsampling_factor"], axis=0).repeat( - self.config["downsampling_factor"], axis=1 - ) + cyto_seg = cyto_seg.repeat(N, axis=0).repeat(N, axis=1) # perform erosion and dilation for smoothing - nuc_seg = erosion(nuc_seg, footprint=disk(self.config["smoothing_kernel_size"])) + nuc_seg = erosion(nuc_seg, footprint=disk(smoothing_kernel_size)) nuc_seg = dilation( - nuc_seg, footprint=disk(self.config["smoothing_kernel_size"]) - ) + nuc_seg, footprint=disk(smoothing_kernel_size + 1) + ) # dilate 1 more than eroded to ensure that we do not lose any pixels - 
cyto_seg = erosion( - cyto_seg, footprint=disk(self.config["smoothing_kernel_size"]) - ) + cyto_seg = erosion(cyto_seg, footprint=disk(smoothing_kernel_size)) cyto_seg = dilation( - cyto_seg, footprint=disk(self.config["smoothing_kernel_size"]) - ) + cyto_seg, footprint=disk(smoothing_kernel_size + 1) + ) # dilate 1 more than eroded to ensure that we do not lose any pixels # combine masks into one stack segmentation = np.stack([nuc_seg, cyto_seg]).astype(np.uint32) From 643960d2a059445ea1ac168f78f5f8245233960b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sophia=20M=C3=A4dler?= <15019107+sophiamaedler@users.noreply.github.com> Date: Tue, 4 Jun 2024 14:00:28 +0200 Subject: [PATCH 2/2] implement sanity check to catch cases #4 --- src/sparcscore/pipeline/workflows.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/sparcscore/pipeline/workflows.py b/src/sparcscore/pipeline/workflows.py index 665c2130..ee0dacd6 100644 --- a/src/sparcscore/pipeline/workflows.py +++ b/src/sparcscore/pipeline/workflows.py @@ -1068,9 +1068,13 @@ def _finalize_segmentation_results(self, size_padding): N, smoothing_kernel_size = _get_downsampling_parameters() nuc_seg = self.maps["nucleus_segmentation"] + n_nuclei = len( + np.unique(nuc_seg) + ) # get number of objects in mask for sanity checking nuc_seg = nuc_seg.repeat(N, axis=0).repeat(N, axis=1) cyto_seg = self.maps["cytosol_segmentation"] + n_cytosols = len(np.unique(cyto_seg)) cyto_seg = cyto_seg.repeat(N, axis=0).repeat(N, axis=1) # perform erosion and dilation for smoothing @@ -1084,6 +1088,23 @@ def _finalize_segmentation_results(self, size_padding): cyto_seg, footprint=disk(smoothing_kernel_size + 1) ) # dilate 1 more than eroded to ensure that we do not lose any pixels + # sanity check to make sure that smoothing does not remove masks + if len(np.unique(nuc_seg)) != n_nuclei: + self.log( + "Error. Number of nuclei in segmentation mask changed after smoothing. This should not happen. 
Ensure that you have chosen adequate smoothing parameters or use the defaults." ) sys.exit( "Error. Number of nuclei in segmentation mask changed after smoothing. This should not happen. Ensure that you have chosen adequate smoothing parameters or use the defaults." ) + + if len(np.unique(cyto_seg)) != n_cytosols: + self.log( + "Error. Number of cytosols in segmentation mask changed after smoothing. This should not happen. Ensure that you have chosen adequate smoothing parameters or use the defaults." ) + sys.exit( + "Error. Number of cytosols in segmentation mask changed after smoothing. This should not happen. Ensure that you have chosen adequate smoothing parameters or use the defaults." ) + + # combine masks into one stack segmentation = np.stack([nuc_seg, cyto_seg]).astype(np.uint32) del cyto_seg, nuc_seg