All StosBrute Tests pass
jamesra committed Nov 22, 2024
1 parent ba2d103 commit ee9c655
Showing 11 changed files with 516 additions and 352 deletions.
35 changes: 25 additions & 10 deletions nornir_imageregistration/core.py
@@ -3,6 +3,7 @@
"""

from collections.abc import Iterable
from logging import exception
import math
from multiprocessing import shared_memory
from multiprocessing.shared_memory import SharedMemory
@@ -215,12 +216,15 @@ def remove_duplicate_points(points: NDArray, columns=list[int]) -> tuple[NDArray
return sorted_point_pairs


def ReduceImage(image: NDArray, scalar: float) -> NDArray:
def ScaleImage(image: NDArray, scalar: float) -> NDArray:
"""
Returns a zoomed array using spline interpolation (CPU/GPU agnostic function)
Returns a scaled array using spline interpolation (CPU/GPU agnostic function)
"""
xp = cupyx.scipy.get_array_module(image)
return xp.ndimage.zoom(image, scalar)
if nornir_imageregistration.UsingCupy():
return xp.ndimage.zoom(image, scalar)
else:
return xp.ndimage.zoom(image.astype(np.float32), scalar)
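The renamed helper now handles both up- and down-scaling; on the NumPy path the input is promoted to float32 before spline interpolation. A minimal CPU-only usage sketch (array contents are illustrative, not from the repository):

```python
import numpy as np
from scipy import ndimage

img = (np.random.rand(256, 256) * 255).astype(np.uint8)
# Mirror the NumPy branch above: promote to float32, then spline-interpolate to half size
half = ndimage.zoom(img.astype(np.float32), 0.5)
assert half.shape == (128, 128)
```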


def ExtractROI(image: NDArray, center, area) -> NDArray:
@@ -666,7 +670,11 @@ def close_shared_memory(input: nornir_imageregistration.Shared_Mem_Metadata | Sh
if input.shared_memory is not None:
input.shared_memory.close()
elif isinstance(input, SharedMemory):
input.close()
try:
input.close()
except Exception as e:
prettyoutput.LogErr(f"Error closing shared memory {input.name}\n{e}")
return

# if input.name in __known_shared_memory_allocations:
# shared_mem, finalizer = __known_shared_memory_allocations[input.name]
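The new try/except above ensures a failed close is logged rather than propagated. A standalone sketch of the same defensive pattern; the helper name and print-based logging are illustrative stand-ins for the committed prettyoutput.LogErr call:

```python
from multiprocessing import shared_memory

def close_quietly(mem: shared_memory.SharedMemory) -> None:
    """Close a SharedMemory handle, logging instead of raising on failure."""
    try:
        mem.close()  # may fail if the segment was already closed or torn down elsewhere
    except Exception as e:
        print(f"Error closing shared memory {mem.name}\n{e}")
```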
Expand Down Expand Up @@ -1078,7 +1086,7 @@ def LoadImage(ImageFullPath: str,
if not MaxDimension is None:
scalar = ScalarForMaxDimension(MaxDimension, image.shape)
if scalar < 1.0:
image = ReduceImage(image, scalar)
image = ScaleImage(image, scalar)

image_mask = None

@@ -1094,7 +1102,7 @@ def LoadImage(ImageFullPath: str,
if MaxDimension is not None:
scalar = ScalarForMaxDimension(MaxDimension, image_mask.shape)
if scalar < 1.0:
image_mask = ReduceImage(image_mask, scalar)
image_mask = ScaleImage(image_mask, scalar)

assert (image.shape == image_mask.shape)
image = RandomNoiseMask(image, image_mask)
@@ -1286,10 +1294,12 @@ def CreateExtremaMask(image: np.ndarray, mask: np.ndarray = None, size_cutoff=0.
"""
Returns a mask for features above a set size that are at max or min pixel value
:param image:
:param mask: Masked regions are excluded from the analysis, the min/max values are calculated from unmasked pixels only
:param minima:
:param maxima:
:param numpy.ndarray mask: Pixels we wish to not include in the analysis
:param size_cutoff: Determines how large a continuous region must be before it is masked. If 0 to 1 this is a fraction of total area. If > 1 it is an absolute count of pixels. If None all min/max are masked regardless of size
:returns: Mask of extrema pixels, pixels that are FALSE are extrema to be excluded
"""
# (minima, maxima, iMin, iMax) = scipy.ndimage.measurements.extrema(image)

@@ -1307,6 +1317,9 @@ def CreateExtremaMask(image: np.ndarray, mask: np.ndarray = None, size_cutoff=0.
if maxima is None:
maxima = image.max()

# Pixels that are TRUE will be excluded, exclude pixels equal to the min or max.
# However, the ndimage.label function finds features that are TRUE. So we start with an
# inverted mask
extrema_mask = xp.logical_or(image == maxima, image == minima)

if mask is not None:
@@ -1319,6 +1332,8 @@ def CreateExtremaMask(image: np.ndarray, mask: np.ndarray = None, size_cutoff=0.
if nLabels == 0: # If there are no labels, do not mask anything
return xp.ones(image.shape, extrema_mask.dtype)

# Identify the label of non-extrema pixels

label_sums = sp.ndimage.sum_labels(
extrema_mask.astype(np.int32) if nornir_imageregistration.UsingCupy() else extrema_mask, extrema_mask_label,
xp.array(range(0, nLabels)))
@@ -1334,16 +1349,16 @@ def CreateExtremaMask(image: np.ndarray, mask: np.ndarray = None, size_cutoff=0.
else:
cutoff_value = size_cutoff

labels_to_save = label_sums < cutoff_value
if xp.any(labels_to_save):
cutoff_labels = xp.flatnonzero(labels_to_save)
small_regions = label_sums < cutoff_value
if xp.any(small_regions):
cutoff_labels = xp.flatnonzero(small_regions)
extrema_mask_minus_small_features = xp.isin(extrema_mask_label, cutoff_labels)

# nornir_imageregistration.ShowGrayscale((image, extrema_mask, extrema_mask_minus_small_features))

return extrema_mask_minus_small_features
else:
raise NotImplemented()
return np.ones(image.shape, bool) # No features large enough to exclude, retain the entire image
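A simplified CPU-only sketch of the logic above: extrema pixels are grouped into connected regions with ndimage.label, region sizes are measured with sum_labels, and only regions at or above the cutoff come back False (excluded). Names are illustrative, not the library's API:

```python
import numpy as np
from scipy import ndimage

def extrema_keep_mask(image: np.ndarray, size_cutoff: float = 0.001) -> np.ndarray:
    """Return a bool mask that is False only for large connected regions at the image min or max."""
    extrema = np.logical_or(image == image.max(), image == image.min())
    labels, n_labels = ndimage.label(extrema)
    if n_labels == 0:
        return np.ones(image.shape, bool)

    # Pixel count per label; label 0 is the non-extrema background
    sizes = ndimage.sum_labels(extrema, labels, index=np.arange(0, n_labels + 1))
    cutoff = size_cutoff * image.size if 0 < size_cutoff <= 1.0 else size_cutoff

    keep_labels = np.flatnonzero(sizes < cutoff)  # background plus small extrema regions
    return np.isin(labels, keep_labels)
```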


def ReplaceImageExtremaWithNoise(image: np.ndarray, imagemask: np.ndarray = None,
2 changes: 1 addition & 1 deletion nornir_imageregistration/image_permutation_helper.py
@@ -91,7 +91,7 @@ def __init__(self,

self._extrema_size_cutoff_in_pixels = None
if extrema_mask_size_cuttoff is None:
extrema_mask_size_cuttoff = np.array((128, 128))
extrema_mask_size_cuttoff = 0.01

if isinstance(extrema_mask_size_cuttoff, np.ndarray):
self.extrema_size_cutoff_in_pixels = int(np.prod(extrema_mask_size_cuttoff))
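The default cutoff changes from a fixed pixel count to a fraction of the image area, so the threshold now scales with image size. A quick comparison (image shape is hypothetical):

```python
import numpy as np

image_shape = (2048, 2048)                           # hypothetical input image
old_cutoff = int(np.prod(np.array((128, 128))))      # previous default: 16,384 pixels regardless of image size
new_cutoff = 0.01 * image_shape[0] * image_shape[1]  # new default: 1% of the area, ~41,943 pixels here
```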
2 changes: 2 additions & 0 deletions nornir_imageregistration/settings/__init__.py
@@ -3,6 +3,8 @@
from .grid_refinement import GridRefinement
from .mosaic_tile_offset import LoadMosaicOffsets, SaveMosaicOffsets, TileOffset
from .translate import TranslateSettings
from .angle_range import AngleSearchRange
from .stos_brute import StosBruteSettings


def GetOrSaveTranslateSettings(settings: TranslateSettings, path: str):
24 changes: 24 additions & 0 deletions nornir_imageregistration/settings/angle_range.py
@@ -0,0 +1,24 @@
from numpy.typing import NDArray
import numpy as np
from pydantic import BaseModel
from math import pi


class AngleSearchRange(BaseModel):
max_angle: float | None = None # Maximum +/- deflection angle to rotate images when searching for the best control point alignment, if None, a full circle is searched at step-size intervals
angle_step_size: float = 3 # Number of degrees to step between search angles

@property
def angle_range(self) -> NDArray[float]:
if self.max_angle is None:
angles = np.arange(start=-180, stop=180, step=self.angle_step_size)
else:
angles = np.arange(start=-self.max_angle,
stop=self.max_angle + self.angle_step_size,
step=self.angle_step_size) # numpy.linspace(-7.5, 7.5, 11)

angles = np.union1d(angles, [0])
return angles

def __iter__(self):
return iter(self.angle_range)
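A brief usage sketch for the new class (values are illustrative); it relies on the package-level export added to settings/__init__.py above:

```python
from nornir_imageregistration.settings import AngleSearchRange

# Limit the rotation search to +/- 7.5 degrees in 2.5 degree steps
search = AngleSearchRange(max_angle=7.5, angle_step_size=2.5)
print(search.angle_range)  # [-7.5 -5.  -2.5  0.   2.5  5.   7.5]

# With max_angle=None the full circle is searched at the step size
full = AngleSearchRange(angle_step_size=45)
assert 0 in full.angle_range and len(full.angle_range) == 360 // 45
```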
21 changes: 11 additions & 10 deletions nornir_imageregistration/settings/grid_refinement.py
@@ -4,14 +4,15 @@
@author: u0490822
"""
from __future__ import annotations
from typing import Iterable, Sequence

import numpy as np
from numpy.typing import NDArray

import nornir_imageregistration


class GridRefinement(object):
class GridRefinement:
"""
Settings for grid refinement
"""
@@ -81,10 +82,10 @@ def __init__(self,
target_mask: NDArray[np.bool_] | None = None,
source_mask: NDArray[np.bool_] | None = None,
num_iterations: int = None,
cell_size=None,
grid_spacing=None,
angles_to_search=None,
final_pass_angles=None,
cell_size: int | NDArray[int] | Iterable[int] | None = None,
grid_spacing: int | NDArray[int] | Iterable[int] | None = None,
angles_to_search: Iterable[float] | NDArray[float] | None = None,
final_pass_angles: Iterable[float] | NDArray[float] | None = None,
max_travel_for_finalization: float = None,
max_travel_for_finalization_improvement: float = None,
min_alignment_overlap: float = None,
@@ -112,7 +113,7 @@ def __init__(self,
:param bool cupy_processing: True if the refinement will be done on the GPU. When set, arrays are created as cupy arrays instead of NDArrays
"""

self._single_thread_processing = single_thread_processing or nornir_imageregistration.UsingCupy()
self._single_thread_processing = single_thread_processing or nornir_imageregistration.UsingCupy()

if target_image is None:
raise ValueError("target_image must be specified")
@@ -177,10 +178,10 @@ def CreateWithPreprocessedImages(target_img_data: nornir_imageregistration.ImagePermutationHelper,
def CreateWithPreprocessedImages(target_img_data: nornir_imageregistration.ImagePermutationHelper,
source_img_data: nornir_imageregistration.ImagePermutationHelper,
num_iterations: int = None,
cell_size=None,
grid_spacing=None,
angles_to_search=None,
final_pass_angles=None,
cell_size: int | NDArray[int] | Iterable[int] | None = None,
grid_spacing: int | NDArray[int] | Iterable[int] | None = None,
angles_to_search: Iterable[float] | NDArray[float] | None = None,
final_pass_angles: Iterable[float] | NDArray[float] | None = None,
max_travel_for_finalization: float = None,
max_travel_for_finalization_improvement: float = None,
min_alignment_overlap: float = None,
73 changes: 73 additions & 0 deletions nornir_imageregistration/settings/stos_brute.py
@@ -0,0 +1,73 @@
from numpy.typing import NDArray
import numpy as np
from typing import NamedTuple, Sequence, Iterable
from pydantic import BaseModel
from nornir_imageregistration.settings.angle_range import AngleSearchRange


class StosBruteSettings(BaseModel):
"""Encodes the settings required or used to invoke StosBrute"""
angles: AngleSearchRange | Sequence[float] | None = None
min_overlap: float = 0.75 # The minimum amount of overlap we require in the images. Higher values reduce false positives but may not register offset images
source_image_scale_factors: tuple[float] | None = None
"""Amount to scale the warped image before attempting registration,
this handles cases where multiple scopes are used with slightly different magnification values"""

larget_dimension: int | None = None # The input images should be scaled so the largest image dimension is equal to this value, default is 1024. None means use the actual image size
try_flipped: bool = False # If True the algorithm will test the flipped version of the source image too

def __init__(self,
angles: AngleSearchRange | Sequence[float] | None = None,
min_overlap: float = 0.75,
source_image_scale_factors: NDArray[float] | None = None,
larget_dimension: int | None = 1024,
try_flipped: bool = False):
"""
:param angles: Angles to search for the best control point alignment or None if all angles should be searched
:param min_overlap: Minimum amount of overlap we require to consider a registration to be valid
:param source_image_scale_factors: Amount to scale the warped image before attempting registration, this handles cases where multiple scopes are used with slightly different magnification values
:param larget_dimension: The input images should be scaled so the largest image dimension is equal to this value, default is 1024. None means use the actual image size
:param try_flipped: If True the algorithm will test the flipped version of the source image too
"""
super().__init__()
self.angles = angles
self.min_overlap = min_overlap
self.larget_dimension = larget_dimension
self.try_flipped = try_flipped

self.source_image_scale_factors = source_image_scale_factors

if self.source_image_scale_factors is None:
pass
elif isinstance(source_image_scale_factors, np.ndarray):
self.source_image_scale_factors = source_image_scale_factors
elif hasattr(source_image_scale_factors, '__iter__'):
self.source_image_scale_factors = np.array(source_image_scale_factors, float)
else:
self.source_image_scale_factors = np.array([source_image_scale_factors, source_image_scale_factors], float)

def angle_range_defined(self) -> bool:
return self.angles is not None

@property
def angle_range(self) -> NDArray[float]:
""":return: The range of angles to search for the best control point alignment or None if all angles should be searched"""
if self.angles is None:
return np.array(range(-178, 182, 2), float)

if isinstance(self.angles, np.ndarray):
return self.angles
elif isinstance(self.angles, AngleSearchRange):
return self.angles.angle_range
elif isinstance(self.angles, Iterable):
return np.array(list(self.angles), float)

raise ValueError(f"Unexpected type for self.angle_search_settings: {self.angles.__class__}")

@property
def source_image_scaling_required(self) -> bool:
if self.source_image_scale_factors is None:
return False

return np.any(self.source_image_scale_factors != 1)
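A usage sketch for the new settings object (values are illustrative; assumes the pydantic model accepts the attribute assignment done in __init__):

```python
from nornir_imageregistration.settings import AngleSearchRange, StosBruteSettings

settings = StosBruteSettings(angles=AngleSearchRange(max_angle=10, angle_step_size=2),
                             min_overlap=0.6,
                             source_image_scale_factors=(1.02, 1.02))
print(settings.angle_range)                    # derived from the AngleSearchRange
print(settings.source_image_scaling_required)  # True, because the factors differ from 1.0

# With no angles specified, a full circle is searched in 2 degree steps
default_settings = StosBruteSettings()
assert len(default_settings.angle_range) == 180
```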
