Commit 240dcd09 authored by DEBREUVE Eric

slightly lower mem usage in frangi
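In short: FrangiEnhancement is split into _FrangiEnhancementSequential and _FrangiEnhancementParallel; HessianMatrix, EigenValues, and SortedByAbsValues are merged into a single _EigenValuesOfHessianMatrix; the sequential path now preallocates one float32 buffer of shape (*img.shape, 3, 3) in the module-level _hessian_matrices and reuses it across scales, instead of allocating six full-size derivative arrays plus a stacked copy at every scale; and the parallel path is only taken when the multiprocessing start method is "fork".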

parent 7bbfba10
@@ -2,6 +2,9 @@
 .mypy_cache/
 __pycache__/
 __version__/
+data/
+runtime/
+mprofile_*.dat
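(Note: the ignored mprofile_*.dat files are the sampling logs that the mprof run recipe added to main.py below writes into the working directory.)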
@@ -37,6 +37,7 @@ from type import array_t, site_h, site_path_h
 import itertools as it_
 from typing import Callable, Sequence, Tuple
+import cython as cy_
 import numpy as np_
@@ -45,7 +46,7 @@ def CandidateConnections(
     influence_map: array_t,
     dist_to_closest: array_t,
     extensions: Sequence[extension_t],
-    max_straight_sq_dist: float = np_.inf,
+    max_straight_sq_dist: cy_.double = np_.inf,
 ) -> list:
     #
     candidate_conn_nfo = []  # conn=connection
@@ -67,8 +68,8 @@ def ShortestPathFromToN(
     point: site_h,
     costs: array_t,
     candidate_points_fct: Callable,
-    max_straight_sq_dist: float = np_.inf,
-) -> Tuple[site_path_h, float]:
+    max_straight_sq_dist: cy_.double = np_.inf,
+) -> Tuple[site_path_h, cy_.double]:
     #
     candidate_points, candidate_indexing = candidate_points_fct(
         point, max_straight_sq_dist
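These two hunks swap plain float annotations for cy_.double so that, once the module is compiled with Cython (see setup.py at the bottom of this commit), the arguments and return values become C doubles, while remaining ordinary Python floats under the interpreter. A minimal standalone sketch of this pure-Python-mode typing; the function name and values are illustrative, not from the repository, and it requires the cython package:

    import cython as cy_

    def SquaredNorm(dx: cy_.double, dy: cy_.double) -> cy_.double:
        # Compiled with Cython, dx and dy are typed as C doubles; run as
        # ordinary Python, cython's shadow types leave behavior unchanged.
        return dx * dx + dy * dy

    print(SquaredNorm(3.0, 4.0))  # 25.0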
......
cimport numpy as np_
ctypedef np_.float64_t real_pix_v_t
# ctypedef np_.ndarray[real_pix_v_t, ndim=3] real_img_t
# cpdef tuple FrangiEnhancement(
#     np_.ndarray[real_pix_v_t, ndim=2] img,
#     tuple scale_range,
#     double scale_step = *,
#     double alpha = *,
#     double beta = *,
#     double frangi_c = *,
#     bint bright_on_dark = *,
#     bint in_parallel = *
# )
cdef np_.ndarray[real_pix_v_t, ndim=3] FrangiEnhancementOneScale(
    np_.ndarray[real_pix_v_t, ndim=3] img,
    double scale,
    double alpha = *,
    double beta = *,
    double frangi_c = *,
    bint bright_on_dark = *,
    object shared_enhanced = *
)
# cdef tuple[np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3]] HessianMatrix(
#     np_.ndarray[real_pix_v_t, ndim=3] img, double scale
# )
# cdef tuple[np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3], np_.ndarray[real_pix_v_t, ndim=3]] EigenValues(
#     np_.ndarray[real_pix_v_t, ndim=3] Dxx, np_.ndarray[real_pix_v_t, ndim=3] Dxy, np_.ndarray[real_pix_v_t, ndim=3] Dxz, np_.ndarray[real_pix_v_t, ndim=3] Dyy, np_.ndarray[real_pix_v_t, ndim=3] Dyz, np_.ndarray[real_pix_v_t, ndim=3] Dzz
# )
cdef np_.ndarray[real_pix_v_t, ndim=3] SortedByAbsValues(np_.ndarray[real_pix_v_t, ndim=3] array, int axis = *)
cdef np_.ndarray[real_pix_v_t, ndim=3] CarefullDivision(np_.ndarray[real_pix_v_t, ndim=3] array_1, np_.ndarray[real_pix_v_t, ndim=3] array_2)
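For context (standard Cython behavior, not something this commit adds): when a .py module is cythonized, a .pxd file with the same basename augments it with C-level declarations like those above, and "= *" declares that a parameter has a default value without restating it. A tiny hypothetical pair for illustration:

    # scale.py
    def Scaled(value, factor=2.0):
        return factor * value

    # scale.pxd, picked up automatically when scale.py is cythonized
    cpdef double Scaled(double value, double factor = *)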
@@ -32,21 +32,20 @@
 from type import array_t
 import multiprocessing as pl_
-# from ctypes import c_bool as c_bool_t
 from ctypes import c_float as c_float_t
 from multiprocessing.sharedctypes import Array as shared_array_t
-# from multiprocessing.sharedctypes import Value as shared_value_t
-from typing import List, Tuple
+from typing import Optional, Tuple
 import numpy as np_
 from scipy import ndimage as im_
+_hessian_matrices = None
 def FrangiEnhancement(
     img: array_t,
-    scale_range: Tuple[float, float] = (1.0e-10, 10.0),
+    scale_range: Tuple[float, float],
     scale_step: float = 2.0,
     alpha: float = 0.5,
     beta: float = 0.5,
@@ -55,8 +54,7 @@ def FrangiEnhancement(
     in_parallel: bool = False,
 ) -> Tuple[array_t, array_t]:
     #
-    enhanced = None
-    scale_map = None
+    global _hessian_matrices
     img = img.astype(np_.float32, copy=False)
     scales = np_.linspace(
@@ -65,97 +63,124 @@ def FrangiEnhancement(
         num=np_.ceil((scale_range[1] - scale_range[0]) / scale_step) + 1,
     )
-    if in_parallel:
-        # initializer = np_.zeros_like(img, dtype = c_float_t)
-        # shared_not_init = shared_value_t(c_bool_t, True)
-        # shared_enhanced = shared_array_t(c_float_t, img.size)
-        # shared_scale_map = shared_array_t(c_float_t, img.size)
-        # kwargs = {
-        #     "alpha": alpha,
-        #     "beta": beta,
-        #     "frangi_c": frangi_c,
-        #     "bright_on_dark": bright_on_dark,
-        #     "not_init": shared_not_init,
-        #     "enhanced": shared_enhanced,
-        #     "scale_map": shared_scale_map,
-        # }
-        shared_enhanced_s = tuple(shared_array_t(c_float_t, img.size) for ___ in scales)
-        processes = tuple(
-            pl_.Process(
-                target=FrangiEnhancementOneScale,
-                args=(img, scale),
-                kwargs={
-                    "alpha": alpha,
-                    "beta": beta,
-                    "frangi_c": frangi_c,
-                    "bright_on_dark": bright_on_dark,
-                    "shared_enhanced": shared_enhanced,
-                },
-            )
-            for scale, shared_enhanced in zip(scales, shared_enhanced_s)
-        )
-        for process in processes:
-            process.start()
-        for process in processes:
-            process.join()
-        # enhanced = np_.array(shared_enhanced).astype(np_.float32, copy = False).reshape(img.shape)
-        # scale_map = np_.array(shared_scale_map).astype(np_.float32, copy = False).reshape(img.shape)
-        enhanced = np_.array(shared_enhanced_s[0])
-        scale_map = np_.full(img.size, scales[0], dtype=np_.float32)
-        for s_idx, shared_enhanced in enumerate(shared_enhanced_s[1:], start=1):
-            local_enhanced = np_.array(shared_enhanced)
-            if s_idx > 0:
-                larger_map = local_enhanced > enhanced
-                enhanced[larger_map] = local_enhanced[larger_map]
-                scale_map[larger_map] = scales[s_idx]
-        enhanced = enhanced.astype(np_.float32, copy=False).reshape(img.shape)
-        scale_map = scale_map.astype(np_.float32, copy=False).reshape(img.shape)
-    else:
-        for s_idx, scale in enumerate(scales):
-            local_enhanced = FrangiEnhancementOneScale(
-                img,
-                scale,
-                alpha=alpha,
-                beta=beta,
-                frangi_c=frangi_c,
-                bright_on_dark=bright_on_dark,
-            )
-            if s_idx > 0:
-                larger_map = local_enhanced > enhanced
-                enhanced[larger_map] = local_enhanced[larger_map]
-                scale_map[larger_map] = scale
-            else:
-                enhanced = local_enhanced
-                scale_map = np_.full(img.shape, scale, dtype=np_.float32)
+    if in_parallel and (pl_.get_start_method(allow_none=False) == "fork"):
+        _hessian_matrices = None
+        FrangiEnhancement_fct = _FrangiEnhancementParallel
+    else:
+        _hessian_matrices = np_.empty((*img.shape, 3, 3), dtype=np_.float32)
+        FrangiEnhancement_fct = _FrangiEnhancementSequential
+    enhanced, scale_map = FrangiEnhancement_fct(
+        img,
+        scales,
+        alpha=alpha,
+        beta=beta,
+        frangi_c=frangi_c,
+        bright_on_dark=bright_on_dark,
+    )
+    _hessian_matrices = None
     return enhanced, scale_map
+def _FrangiEnhancementSequential(
+    img: array_t,
+    scales: array_t,
+    alpha: float = 0.5,
+    beta: float = 0.5,
+    frangi_c: float = 500.0,
+    bright_on_dark: bool = True,
+) -> Tuple[array_t, array_t]:
+    #
+    enhanced = None
+    scale_map = None
+    for s_idx, scale in enumerate(scales):
+        local_enhanced = _FrangiEnhancementOneScale(
+            img,
+            scale,
+            alpha=alpha,
+            beta=beta,
+            frangi_c=frangi_c,
+            bright_on_dark=bright_on_dark,
+        )
+        if s_idx > 0:
+            larger_map = local_enhanced > enhanced
+            enhanced[larger_map] = local_enhanced[larger_map]
+            scale_map[larger_map] = scale
+        else:
+            enhanced = local_enhanced
+            scale_map = np_.full(img.shape, scale, dtype=np_.float32)
+    return enhanced, scale_map
+def _FrangiEnhancementParallel(
+    img: array_t,
+    scales: array_t,
+    alpha: float = 0.5,
+    beta: float = 0.5,
+    frangi_c: float = 500.0,
+    bright_on_dark: bool = True,
+) -> Tuple[array_t, array_t]:
+    #
+    fixed_args = {
+        "alpha": alpha,
+        "beta": beta,
+        "frangi_c": frangi_c,
+        "bright_on_dark": bright_on_dark,
+    }
+    shared_enhanced_s = tuple(shared_array_t(c_float_t, img.size) for ___ in scales)
+    processes = tuple(
+        pl_.Process(
+            target=_FrangiEnhancementOneScale,
+            args=(img, scale),
+            kwargs={**fixed_args, "shared_enhanced": shared_enhanced},
+        )
+        for scale, shared_enhanced in zip(scales, shared_enhanced_s)
+    )
+    for process in processes:
+        process.start()
+    for process in processes:
+        process.join()
+    enhanced = np_.array(shared_enhanced_s[0])
+    scale_map = np_.full(img.size, scales[0], dtype=np_.float32)
+    for s_idx, shared_enhanced in enumerate(shared_enhanced_s[1:], start=1):
+        local_enhanced = np_.array(shared_enhanced)
+        larger_map = local_enhanced > enhanced
+        enhanced[larger_map] = local_enhanced[larger_map]
+        scale_map[larger_map] = scales[s_idx]
+    enhanced = enhanced.astype(np_.float32, copy=False).reshape(img.shape)
+    scale_map = scale_map.astype(np_.float32, copy=False).reshape(img.shape)
+    return enhanced, scale_map
-def FrangiEnhancementOneScale(
+def _FrangiEnhancementOneScale(
     img: array_t,
     scale: float,
     alpha: float = 0.5,
     beta: float = 0.5,
     frangi_c: float = 500.0,
     bright_on_dark: bool = True,
-    shared_enhanced=None,
-    # not_init = None,
-    # enhanced =None,
-    # scale_map =None,
-) -> array_t:
+    shared_enhanced: shared_array_t = None,
+) -> Optional[array_t]:
     #
-    print(f"scale={scale}")
+    print(f"    scale={scale}")
-    Dxx, Dxy, Dxz, Dyy, Dyz, Dzz = HessianMatrix(img, scale)
-    eig_val_1, eig_val_2, eig_val_3 = EigenValues(Dxx, Dxy, Dxz, Dyy, Dyz, Dzz)
+    sorted_eigenvalues = _EigenValuesOfHessianMatrix(img, scale)
     # Absolute plate, blob, and background maps
-    abs_eig_val_1 = np_.fabs(eig_val_1)
-    abs_eig_val_2 = np_.fabs(eig_val_2)
-    abs_eig_val_3 = np_.fabs(eig_val_3)
-    plate_map = CarefullDivision(abs_eig_val_2, abs_eig_val_3)
-    blob_map = CarefullDivision(abs_eig_val_1, np_.sqrt(abs_eig_val_2 * abs_eig_val_3))
-    bckgnd = np_.sqrt(abs_eig_val_1 ** 2 + abs_eig_val_2 ** 2 + abs_eig_val_3 ** 2)
+    abs_eig_val = np_.fabs(sorted_eigenvalues)
+    plate_map = _CarefullDivision(abs_eig_val[..., 1], abs_eig_val[..., 2])
+    blob_map = _CarefullDivision(abs_eig_val[..., 0], np_.sqrt(np_.prod(abs_eig_val[..., 1:], axis=-1)))
+    bckgnd = np_.sqrt(np_.sum(abs_eig_val ** 2, axis=-1))
     # Normalized plate, blob, and background maps
     plate_factor = -2.0 * alpha ** 2
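The rewritten dispatch only takes the parallel path under the "fork" start method, presumably because forked workers inherit the parent's memory (the image and the shared buffers) cheaply, whereas "spawn" would re-import the module and pickle the large image once per scale. A minimal, simplified sketch of the shared-Array pattern used by _FrangiEnhancementParallel (the worker here just doubles the input; names are illustrative, not the repository's code):

    import multiprocessing as pl_
    from ctypes import c_float as c_float_t
    from multiprocessing.sharedctypes import Array as shared_array_t
    import numpy as np_

    def _Double(img, shared_out):
        # Stand-in for _FrangiEnhancementOneScale: write the flattened
        # result into the shared buffer instead of returning it.
        shared_out[:] = (2.0 * img).flatten()[:]

    if __name__ == "__main__":
        img = np_.arange(6, dtype=np_.float32).reshape(2, 3)
        shared_out = shared_array_t(c_float_t, img.size)
        worker = pl_.Process(target=_Double, args=(img, shared_out))
        worker.start()
        worker.join()
        print(np_.array(shared_out).reshape(img.shape))  # img * 2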
@@ -167,71 +192,61 @@ def FrangiEnhancementOneScale(
     local_enhanced = plate_map * blob_map * bckgnd
     if bright_on_dark:
-        local_enhanced[eig_val_2 > 0.0] = 0.0
-        local_enhanced[eig_val_3 > 0.0] = 0.0
+        eig_condition = np_.any(sorted_eigenvalues[..., 1:] > 0.0, axis=-1)
     else:
-        local_enhanced[eig_val_2 < 0.0] = 0.0
-        local_enhanced[eig_val_3 < 0.0] = 0.0
+        eig_condition = np_.any(sorted_eigenvalues[..., 1:] < 0.0, axis=-1)
-    local_enhanced[
-        np_.logical_or(np_.isnan(local_enhanced), np_.isinf(local_enhanced))
-    ] = 0.0
+    invalid_condition = np_.logical_not(np_.isfinite(local_enhanced))
+    local_enhanced[np_.logical_or(eig_condition, invalid_condition)] = 0.0
-    # if enhanced is None:
     if shared_enhanced is None:
         return local_enhanced
     else:
         shared_enhanced[:] = local_enhanced.flatten()[:]
-def HessianMatrix(
-    img: array_t, scale: float
-) -> Tuple[array_t, array_t, array_t, array_t, array_t, array_t]:
+def _EigenValuesOfHessianMatrix(img: array_t, scale: float) -> array_t:
     #
+    global _hessian_matrices
     if scale > 0.0:
         img = scale ** 2 * im_.gaussian_filter(img, scale)
     else:
         img = scale ** 2 * img
+    if _hessian_matrices is None:
+        hessian_matrices = np_.empty((*img.shape, 3, 3), dtype=np_.float32)
+    else:
+        hessian_matrices = _hessian_matrices
     Dx, Dy, Dz = np_.gradient(img)
-    Dxx, Dxy, Dxz = np_.gradient(Dx, axis=(0, 1, 2))
-    Dyy, Dyz = np_.gradient(Dy, axis=(1, 2))
-    Dzz = np_.gradient(Dz, axis=2)
-    return Dxx, Dxy, Dxz, Dyy, Dyz, Dzz
+    (
+        hessian_matrices[..., 0, 0],
+        hessian_matrices[..., 0, 1],
+        hessian_matrices[..., 0, 2],
+    ) = np_.gradient(Dx)
+    hessian_matrices[..., 1, 1], hessian_matrices[..., 1, 2] = np_.gradient(
+        Dy, axis=(1, 2)
+    )
+    hessian_matrices[..., 2, 2] = np_.gradient(Dz, axis=2)
+    hessian_matrices[..., 1, 0] = hessian_matrices[..., 0, 1]
+    hessian_matrices[..., 2, 0] = hessian_matrices[..., 0, 2]
+    hessian_matrices[..., 2, 1] = hessian_matrices[..., 1, 2]
-def EigenValues(
-    Dxx: array_t, Dxy: array_t, Dxz: array_t, Dyy: array_t, Dyz: array_t, Dzz: array_t
-) -> List[array_t]:
-    #
-    hessians = np_.stack((Dxx, Dxy, Dxz, Dxy, Dyy, Dyz, Dxz, Dyz, Dzz), axis=-1)
-    hessians = np_.reshape(hessians, (*Dxx.shape, 3, 3))
-    eigenvalues = np_.linalg.eigvalsh(hessians)
-    sorted_eigenvalues = SortedByAbsValues(eigenvalues, axis=-1)
-    return [
-        np_.squeeze(eigenvalue, axis=-1)
-        for eigenvalue in np_.split(
-            sorted_eigenvalues, sorted_eigenvalues.shape[-1], axis=-1
-        )
-    ]
+    eigenvalues = np_.linalg.eigvalsh(hessian_matrices)
-def SortedByAbsValues(array: array_t, axis: int = 0) -> array_t:
-    #
-    index = list(np_.ix_(*[np_.arange(size) for size in array.shape]))
-    index[axis] = np_.fabs(array).argsort(axis=axis)
-    return array[tuple(index)]
+    # Sorted by abs value
+    index = list(np_.ix_(*[np_.arange(size) for size in eigenvalues.shape]))
+    index[-1] = np_.fabs(eigenvalues).argsort(axis=-1)
+    sorted_eigenvalues = eigenvalues[tuple(index)]
+    return sorted_eigenvalues
-def CarefullDivision(array_1: array_t, array_2: array_t) -> array_t:
+def _CarefullDivision(array_1: array_t, array_2: array_t) -> array_t:
     #
     null_map = array_2 == 0.0
     if null_map.any():
-        denominator = np_.copy(array_2)
+        denominator = array_2.copy()
         denominator[null_map] = 1.0e-10
     else:
         denominator = array_2
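The open-mesh indexing that replaces SortedByAbsValues is easy to sanity-check in isolation. A small standalone example (made-up values, not from the repository):

    import numpy as np_

    eigenvalues = np_.array([[3.0, -1.0, -2.0], [-5.0, 4.0, 0.5]])

    # Index every axis with an open mesh, then replace the last axis
    # by the argsort of the absolute values.
    index = list(np_.ix_(*[np_.arange(size) for size in eigenvalues.shape]))
    index[-1] = np_.fabs(eigenvalues).argsort(axis=-1)

    print(eigenvalues[tuple(index)])
    # [[-1.  -2.   3. ]
    #  [ 0.5  4.  -5. ]]  i.e. |l1| <= |l2| <= |l3| along the last axis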
......
@@ -29,6 +29,14 @@
 # The fact that you are presently reading this means that you have had
 # knowledge of the CeCILL license and that you accept its terms.
+# Time profiling:
+#     python -m cProfile -o runtime/profiling.log -s name main.py
+# Memory profiling:
+#     python -m memory_profiler main.py
+#     or
+#     mprof run main.py
+#     mprof plot
 import connection as cn_
 import extension as xt_
 import feedback as fb_
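A companion snippet (not part of the commit) for reading the dump that the cProfile recipe above writes to runtime/profiling.log:

    import pstats

    stats = pstats.Stats("runtime/profiling.log")
    stats.sort_stats("cumulative").print_stats(10)  # ten most expensive calls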
@@ -36,10 +44,10 @@ from main_prm import *
 import soma as sm_
 import time as tm_
+# import tracemalloc as tr_
 import matplotlib.pyplot as pl_
 import numpy as np_
-import skimage.color as cl_
 import skimage.io as io_
 import skimage.measure as ms_
@@ -51,10 +59,13 @@ extension_t = xt_.extension_t
 print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
 start_time = tm_.time()
+# tr_.start()
 # --- Images
 image = io_.imread(data_path)
-image = cl_.rgb2gray(image)[:, 512:, 512:]
+assert image.shape.__len__() == 3
+image = image[:, 512:, 512:]
 img_shape = image.shape
 image_for_soma = sm_.NormalizedImage(image)
@@ -88,6 +99,8 @@ somas = tuple(
 )
 print(f"    n = {n_somas}")
+elapsed_time = tm_.gmtime(tm_.time() - start_time)
+print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
 if with_plot:
     fb_.PlotSomas(somas, som_nfo, axes)
@@ -108,6 +121,8 @@ extensions = tuple(
 )
 print(f"    n = {n_extensions}")
+elapsed_time = tm_.gmtime(tm_.time() - start_time)
+print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
 if with_plot:
     fb_.PlotExtensions(extensions, ext_nfo, img_shape)
@@ -147,6 +162,8 @@ for ep_idx, soma, extension, end_point in candidate_conn_nfo:
     #     soma.Extend(extensions, som_nfo["dist_to_closest"], costs)
 fb_.PrintConnectedExtensions(extensions)
+elapsed_time = tm_.gmtime(tm_.time() - start_time)
+print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
 if with_plot:
     fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
@@ -188,6 +205,8 @@ while should_look_for_connections:
     print("")
     fb_.PrintConnectedExtensions(extensions)
+    elapsed_time = tm_.gmtime(tm_.time() - start_time)
+    print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
     if with_plot:
         fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
@@ -199,6 +218,17 @@ for soma in somas:
 # for extension in extensions:
 #     print(extension)
+# snapshot = tr_.take_snapshot()
+# top_file_stats = snapshot.statistics('lineno')
+# print("Memory Profiling: Top 10 FILES")
+# for stat in top_file_stats[:10]:
+#     print(stat)
+# top_block_stats = snapshot.statistics('traceback')
+# top_block_stats = top_block_stats[0]
+# print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
+# for line in top_block_stats.traceback.format():
+#     print(line)
+elapsed_time = tm_.gmtime(tm_.time() - start_time)
 print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
 print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
......
setup.py 0 → 100644
+from distutils.core import setup
+import numpy as np_
+from Cython.Build import cythonize
+setup(
+    ext_modules=cythonize("frangi_3d.py"),
+    requires=["numpy", "scipy"],
+    include_dirs=[np_.get_include()],
+)
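With this setup.py, the extension is built the usual distutils/Cython way (standard workflow, not stated in the commit):

    python setup.py build_ext --inplace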