# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.
# Time profiling:
# python -m cProfile -o runtime/profiling.log -s name main.py
# Memory profiling:
# python -m memory_profiler main.py
# or
# mprof run main.py
# mprof plot
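#
# Usage: the pipeline is driven by a .ini parameter file (see Main below):
#     python main.py <parameters>.ini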
import os as os_
import sys as sy_
import time as tm_
import pathlib

import imageio as io_
import matplotlib.pyplot as pl_
import networkx as nx_
import numpy as np_
import pandas as pd_
import skimage.exposure as ex_
import skimage.measure as ms_
import skimage.morphology as mp_
import brick.component.connection as cn_
import brick.component.extension as xt_
import brick.component.soma as sm_
import brick.feature.graph as ge_
import brick.image.input as in_
import brick.image.unit
import brick.io.console.feedback
import brick.io.screen.feedback as fb_
import brick.io.screen.plot as po_
import brick.io.screen.soma_validation as smvl
import brick.processing.dijkstra_1_to_n as dk_
from brick.io.storage.parameters import NutrimorphArguments
from skl_graph.graph import skl_graph_t
from skl_graph.map import SKLMapFromObjectMap
def NutriMorph(
data_path: str,
save_images,
save_csv,
channel=None,
dilatation_erosion=None,
size_voxel_in_micron=None,
soma_low_c=None,
soma_high_c=None,
soma_selem_micron_c=None,
soma_min_area_c=None,
soma_max_area_c=None,
adapt_hist_equalization=None,
ext_low_c=None,
ext_high_c=None,
ext_selem_micron_c=None,
ext_min_area_c=None,
max_straight_sq_dist_c=None,
max_weighted_length_c=None,
scale_range=None,
scale_step=None,
alpha=None,
beta=None,
frangi_c=None,
diff_mode=None,
bright_on_dark=None,
method=None,
hist_min_length=None,
hist_step_length=None,
number_of_bins_length=None,
hist_bins_borders_length=None,
hist_min_curvature=None,
hist_step_curvature=None,
number_of_bins_curvature=None,
hist_bins_borders_curvature=None,
with_plot=None,
interactive=None,
in_parallel=None,
) -> pd_.DataFrame:
""""""
soma_t = sm_.soma_t
extension_t = xt_.extension_t
print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
start_time = tm_.time()
    # TODO: add proper warnings with the warnings module
# --- Images
    # Find the name of the file, i.e. the identifier of the tested biological condition
name_file = os_.path.basename(data_path)
print("FILE: ", name_file)
name_file = name_file.replace(".tif", "")
name_dir = os_.path.dirname(data_path)
print("DIR: ", name_dir)
# Find the dimension of the image voxels in micron
# TODO do something more general, here too specific to one microscope metadata
size_voxel_in_micron = brick.image.unit.FindVoxelDimensionInMicron(
data_path, size_voxel_in_micron=size_voxel_in_micron
)
# Read the image
image = io_.volread(data_path)
# Image size verification - simple version without user interface
image = in_.ImageVerification(image, channel)
# iv_.image_verification(image, channel) # -> PySide2 user interface # TODO: must return the modified image!
# /!\ conflicts between some versions of PySide2 and Python3
# image = image[:, 800:, 300:]
image = image[:, 800:, 500:]
# image = image[:, 600:, 500:]
img_shape = image.shape
img_shape_as_str = str(img_shape)[1:-1].replace(", ", "x")
f"IMAGE: s.{img_shape_as_str} t.{image.dtype} m.{np_.amin(image)} M.{np_.amax(image)}"
# Intensity relative normalization (between 0 and 1).
# Image for soma normalized for hysteresis
image_for_soma = in_.IntensityNormalizedImage(image)
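    # (Typically a min-max rescaling to [0, 1], i.e. (img - min) / (max - min); the
    # exact formula lives in brick.image.input.IntensityNormalizedImage.)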
# Image for extensions not normalized for frangi enhancement
image_for_ext = image.copy()
print(
f"NRM-IMG: t.{image_for_soma.dtype} m.{np_.amin(image_for_soma):.2f} M.{np_.amax(image_for_soma):.2f}"
)
# --- Initialization
som_nfo = {}
ext_nfo = {}
axes = {}
#
# --- Somas
print("\n--- Soma Detection")
# Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
soma_min_area_c = brick.image.unit.ToPixel(
soma_min_area_c, size_voxel_in_micron, dimension=(0, 1, 2)
)
soma_max_area_c = brick.image.unit.ToPixel(
soma_max_area_c, size_voxel_in_micron, dimension=(0, 1, 2)
)
soma_selem_c = mp_.disk(
brick.image.unit.ToPixel(soma_selem_micron_c, size_voxel_in_micron)
)
# - Create the maps for enhancing and detecting the soma
# Hysteresis thresholding and closing/opening
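    # (Hysteresis keeps a voxel above soma_low_c only if it is connected to a voxel
    # above soma_high_c -- the behaviour of skimage.filters.apply_hysteresis_threshold;
    # soma_t.Map presumably applies this before closing/opening with soma_selem_c.)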
print("Hysteresis thresholding: ", end="")
som_nfo["map_small"] = soma_t.Map(
image_for_soma, soma_low_c, soma_high_c, soma_selem_c
)
print("Done")
# Deletion of too small elements
print("Morphological cleaning: ", end="")
som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(
som_nfo["map_small"], soma_min_area_c
)
    # Deletion of too-big elements (probably aggregates of cells)
    # Separated from the deletion of small elements so that extensions are not detected
    # where too-big somas were just deleted
som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(
som_nfo["map_small"], som_lmp_small, soma_max_area_c
)
print("Done")
# Labelling of somas, find the number of somas
print("Somas labelling: ", end="")
som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)
print("Done")
# Use relabel instead of label to optimize the algorithm. /!\ But BUG.
# som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
# n_somas = som_nfo["lmp"].max()
    # Create the distance and influence maps used next for soma-extension and extension-extension connections
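    # (The distance map stores, per voxel, the distance to the closest soma; the
    # influence map presumably labels each voxel with the uid of that closest soma,
    # partitioning the volume into per-soma influence zones.)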
print("Distance and Influence Map creation: ", end="")
som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(
som_nfo["lmp"]
)
print("Done")
    # Create the tuple of all 'soma' objects
print("Somas creation: ", end="")
somas = tuple(
soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1)
)
print("Done")
print(f" n = {n_somas}")
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
fb_.PlotSomas(somas, som_nfo, axes)
window = smvl.soma_validation_window_t(
image_for_soma,
som_nfo["lmp"],
mip_axis=0,
)
n_somas = window.LaunchValidation()
print(f"Validated Somas: {n_somas}")
#
print("\n--- Extension Detection")
    # Set to zero the pixels belonging to the somas.
    # Use the map that still includes big somas (cell aggregates) so that no extension
    # is created where a too-big soma was deleted
del_soma = (som_nfo["map_small"] > 0).nonzero()
image_for_ext[del_soma] = 0
# Change the extensions parameters from micron to pixel dimensions
ext_min_area_c = brick.image.unit.ToPixel(
ext_min_area_c, size_voxel_in_micron, dimension=(0, 1, 2)
)
if ext_selem_micron_c == 0:
ext_selem_pixel_c = None
else:
ext_selem_pixel_c = mp_.disk(
brick.image.unit.ToPixel(ext_selem_micron_c, size_voxel_in_micron)
)
    # - Perform Frangi feature enhancement (via the Python or the faster C implementation)
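    # (The Frangi vesselness filter scores tubular structures from the eigenvalues of
    # the image Hessian at each scale in scale_range; alpha and beta weigh the
    # plate- vs blob-likeness discrimination, as in skimage.filters.frangi.)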
enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
image_for_ext,
scale_range,
scale_step,
alpha,
beta,
frangi_c,
bright_on_dark,
method,
        diff_mode=diff_mode,
        in_parallel=in_parallel,  # assumed: the only plausible use of the in_parallel parameter
    )
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
# - Creation of the enhanced maps
    # Enhance the contrast of the Frangi output
if adapt_hist_equalization:
        # necessary but not sufficient: adaptive histogram equalization
enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
else:
        # necessary but not sufficient: global histogram equalization
enhanced_ext = ex_.equalize_hist(enhanced_ext)
# Rescale the image by stretching the intensities between 0 and 255, necessary but not sufficient
enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
    # Hysteresis thresholding
print("Hysteresis Thresholding: ", end="")
ext_nfo["coarse_map"] = extension_t.CoarseMap(
enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c
)
print("Done")
# Deletion of too small elements
print("Morphological cleaning: ", end="")
ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(
ext_nfo["coarse_map"], ext_min_area_c
)
print("Done")
# Creation of the extensions skeleton
ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
# Deletion of extensions found within the somas
ext_nfo["map"][som_nfo["map"] > 0] = 0
# Labelling of extensions and number of extensions determined
ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)
# Use relabel instead of label to optimize the algorithm. BUT PROBLEM WITH THE NUMBER OF EXTENSIONS DETECTED !
# ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
# n_extensions = ext_nfo["lmp"].max()
# Create the tuple extensions, containing all the objects 'extension'
extensions = tuple(
extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
for uid in range(1, n_extensions + 1)
)
print("Done")
print(f" n = {n_extensions}")
# Create global end point map for extensions
glob_ext_ep_map = xt_.EndPointMap(ext_nfo["lmp"] > 0)
all_end_points = list(
zip(*glob_ext_ep_map.nonzero())
) # updated in ValidateConnexion
#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
fb_.PlotExtensions(extensions, ext_nfo, img_shape)
# -- Preparation for Connections
    # Calculate the COST of each pixel in the image and DILATE the existing extensions
    # to prevent connections from running tangent to extensions
dijkstra_costs = dk_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
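    # (Connections are then computed as lowest-total-cost voxel paths on this cost
    # image with Dijkstra's algorithm; the Dilate/Erode calls below temporarily raise
    # or restore costs around extensions to enforce the no-tangency constraint.)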
if dilatation_erosion:
# Dilate all extensions
for extension in extensions:
cn_.Dilate(extension.sites, dijkstra_costs)
    # -- Soma-Extension
print("\n--- Soma <-> Extension")
# Change the extensions parameters from micron to pixel dimensions
max_straight_sq_dist_c = brick.image.unit.ToPixel(
max_straight_sq_dist_c, size_voxel_in_micron
)
max_weighted_length_c = brick.image.unit.ToPixel(
max_weighted_length_c, size_voxel_in_micron
)
    # Find the candidate extensions, with their end points, for connection to the somas
candidate_conn_nfo = cn_.CandidateConnections(
somas,
som_nfo["influence_map"],
som_nfo["dist_to_closest"],
extensions,
max_straight_sq_dist_c,
)
# For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
for ep_idx, soma, extension, end_point in candidate_conn_nfo:
# Only try to connect if the extension is not already connected
if extension.is_unconnected:
print(f" Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
if dilatation_erosion:
# Erode the end_point of the extension in the costs map
cn_.Erode(
(end_point,),
dijkstra_costs,
extension,
extensions,
image,
all_end_points=all_end_points,
)
# Use of Dijkstra shortest weighted path
path, length = cn_.ShortestPathFromToN(
image=image,
point=end_point,
extension=extension,
costs=dijkstra_costs,
candidate_points_fct=soma.ContourPointsCloseTo,
max_straight_sq_dist=max_straight_sq_dist_c,
)
            # Keep the connection only if its weighted length does not exceed the
            # allowed maximum (length and max_weighted_length_c are both in pixels)
if length <= max_weighted_length_c:
# Validate and update all the fields + dilate again the whole extension
cn_.ValidateConnection(
soma,
extension,
end_point,
path,
dijkstra_costs,
all_ep=all_end_points,
)
if dilatation_erosion:
# Dilate extensions
cn_.Dilate(extension.sites, dijkstra_costs)
print(": Made")
else:
if dilatation_erosion:
cn_.Dilate(extension.sites, dijkstra_costs)
print("")
else:
if dilatation_erosion:
cn_.Dilate(extension.sites, dijkstra_costs)
print("")
brick.io.console.feedback.PrintConnectedExtensions(extensions)
#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
    # -- Extension-Extension
print("\n--- Extension <-> Extension")
should_look_for_connections = True
    # Update the soma maps with the new ext-ext connections
while should_look_for_connections:
if dilatation_erosion:
# Update the costs by dilating everything again plus the contour points of the soma
for soma in somas:
cn_.Dilate(soma.contour_points, dijkstra_costs)
                # Below probably not necessary, but kept as a safeguard
for extension in soma.extensions:
cn_.Dilate(extension.sites, dijkstra_costs)
# Update the final somas + ext map by adding the new extensions and their connexion paths
som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
# Update the influence map
som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(
som_nfo["soma_w_ext_lmp"]
)
        # Find the candidate extensions for connection to the primary extensions
candidate_conn_nfo = cn_.CandidateConnections(
somas,
som_nfo["influence_map"],
som_nfo["dist_to_closest"],
extensions,
max_straight_sq_dist_c,
)
should_look_for_connections = False
# For each candidate, verify if valid based on the shortest path and the maximum allowed straight distance
for ep_idx, soma, extension, end_point in candidate_conn_nfo:
# Only try to connect if the extension is not already connected
if extension.is_unconnected:
print(
f" Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end=""
)
if dilatation_erosion:
# Erode the end_point of the extension in the costs map
cn_.Erode(
(end_point,),
dijkstra_costs,
extension,
extensions,
image,
all_end_points=all_end_points,
)
# Use of Dijkstra shortest weighted path
path, length = cn_.ShortestPathFromToN(
image=image,
point=end_point,
extension=extension,
costs=dijkstra_costs,
candidate_points_fct=soma.ExtensionPointsCloseTo,
extensions=extensions,
all_end_points=all_end_points,
max_straight_sq_dist=max_straight_sq_dist_c,
erode_path=dilatation_erosion,
)
                # Keep the connection only if its weighted length does not exceed the allowed maximum
if length <= max_weighted_length_c:
                    # Verify that the last element of the connection path is contained in an
                    # extension; if so, return the extension containing the path's last site
                    tgt_extension = extension_t.ExtensionContainingSite(
extensions, path[-1]
)
                    # Validate the connection: update the glial-component objects and store more info in their fields
cn_.ValidateConnection(
                        tgt_extension,
extension,
end_point,
path,
dijkstra_costs,
all_ep=all_end_points,
)
if dilatation_erosion:
# Dilate extensions
cn_.Dilate(extension.sites, dijkstra_costs)
should_look_for_connections = True
print(": Made")
else:
if dilatation_erosion:
cn_.Dilate(extension.sites, dijkstra_costs)
print("")
else:
if dilatation_erosion:
cn_.Dilate(extension.sites, dijkstra_costs)
print("")
    # -- Create an extension map containing all the extensions + connections, without the somas.
    # Used for the graph extraction
ext_nfo["lmp_soma"] = som_nfo["soma_w_ext_lmp"].copy()
ext_nfo["lmp_soma"][som_nfo["map"] > 0] = 0
#
brick.io.console.feedback.PrintConnectedExtensions(extensions)
#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
# -- Summary
print("\n")
for soma in somas:
print(soma)
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
pl_.show()
    po_.MaximumIntensityProjectionZ(
        som_nfo["soma_w_ext_lmp"],
        block=False,
        output_image_file_name=str(pathlib.Path(save_images) / f"MIP_{name_file}.png"),
    )
# --- Extract all the extensions of all somas as a graph
    # Create the graphs with the SKLGraph module (available on Eric Debreuve's GitLab)
for soma in somas:
print(f" Soma {soma.uid}", end="")
        # Create the SKLGraph skeletonized map
        # (assumed reconstruction of a line lost in this revision: restrict the
        # extension map to this soma's label)
        ext_map = SKLMapFromObjectMap(ext_nfo["lmp_soma"] == soma.uid)
        # Create the graph from the SKLGraph skeletonized map
        soma.skl_graph = skl_graph_t.FromSKLMap(
            ext_map, width_map=size_voxel_in_micron
        )
# --- Find the root of the {ext+conn} graphs.
# Roots are the nodes of degree 1 that are to be linked to the soma
soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)
# Add a node "soma" and link it to the root nodes
soma_node = (
f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
)
soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)
for node in soma.graph_roots.values():
soma.skl_graph.add_edge(node, soma_node, root=True)
if save_images is not None:
nx_.draw_networkx(soma.skl_graph)
pl_.savefig(str(pathlib.Path(save_images) / f"graph_{name_file}_soma{soma.uid}.png"))
print(": Done")
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
# --- Extract features
# Parameters
if hist_bins_borders_length is None:
number_of_bins_length = int(number_of_bins_length)
bins_length = np_.linspace(
hist_min_length,
hist_min_length + hist_step_length * number_of_bins_length,
num=number_of_bins_length,
)
bins_length[-1] = np_.inf
else:
bins_length = np_.array(hist_bins_borders_length)
bins_length[-1] = np_.inf
if hist_bins_borders_curvature is None:
number_of_bins_curvature = int(number_of_bins_curvature)
bins_curvature = np_.linspace(
hist_min_curvature,
hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
num=number_of_bins_curvature,
)
bins_curvature[-1] = np_.inf
else:
bins_curvature = np_.array(hist_bins_borders_curvature)
bins_curvature[-1] = np_.inf
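    # (In both branches the last bin border is set to +inf so that the final histogram
    # bin is open-ended and catches all values beyond the explicit borders.)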
# Pandas dataframe creation with all the measured features
features_df = ge_.ExtractFeaturesInDF(
name_file,
somas,
size_voxel_in_micron,
bins_length,
bins_curvature,
ext_scales,
)
# Save the pandas df into .csv
    if save_csv is not None:
        features_df.to_csv(f"{save_csv}/{name_file}.csv")
    else:
        features_df.to_csv(f"{name_dir}/{name_file}.csv")
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}\n")
return features_df
def Main():
""""""
    if len(sy_.argv) < 2:
print("Missing parameter file argument")
sy_.exit(0)
argument = sy_.argv[1]
if not (
os_.path.isfile(argument)
and os_.access(argument, os_.R_OK)
and (os_.path.splitext(argument)[1].lower() == ".ini")
):
print("Wrong parameter file path or type, or parameter file unreadable")
sy_.exit(0)
(
data_path,
save_images,
save_csv,
save_features_analysis,
statistical_analysis,
arguments,
) = NutrimorphArguments(argument)
    # Differentiate between a path to a tiff file and a path to a directory
if pathlib.Path(data_path).is_file():
# Perform NutriMorph algorithm on the file entered in parameters
print(
"WARNING: Will not perform features analysis on a single image.\n For features analysis, "
"give a directory path.\n"
)
_ = NutriMorph(data_path, save_images, save_csv, **arguments)
elif pathlib.Path(data_path).is_dir():
        # The data path is a directory: process every tiff file below it
        # Initialize the future concatenated features
        concatenated_features_df = pd_.DataFrame()
        # Find all the tiff files in the parent directory and its subdirectories
for path in pathlib.Path(data_path).glob("**/*.tif"):
if path.is_file():
features_df = NutriMorph(str(path), save_images, save_csv, **arguments)
                # Concatenate all the dataframes
                # (DataFrame.append was removed in pandas 2.0; pd_.concat replaces it)
                concatenated_features_df = pd_.concat(
                    (concatenated_features_df, features_df)
                )
        # If some rows (i.e. somas) have NaN, drop them
        # -- due to the best-fitting-ellipsoid algorithm (JTJ is singular when the convex hull is flat)
concatenated_features_df = concatenated_features_df.dropna()
        # Save to .csv in the parent directory if no save path was given
        if save_csv is None:
            concatenated_features_df.to_csv(f"{data_path}/features.csv")
        else:
            concatenated_features_df.to_csv(f"{save_csv}/features.csv")
        # Clustering with this dataframe and the features_analysis.py module
if statistical_analysis:
os_.system(
f"feature_analysis.py {save_csv}/features.csv {save_features_analysis}"
)
    else:
        raise ImportError(f"{data_path}: Not a valid data path!")
print("\n\n--- NOT IMPORTING MEMORY PROFILING ---\n\n")
# import brick.auto_decorator as dctr
# dctr.MemoryProfileCallables(__name__)
if __name__ == "__main__":
#
Main()
# data_path = None
# channel = None
# save_images = None
# save_csv = None
# save_features_analysis = None
# dilatation_erosion = None
# size_voxel_in_micron = None
# statistical_analysis = None
# soma_low_c = None
# soma_high_c = None
# soma_selem_micron_c = None
# soma_min_area_c = None
# soma_max_area_c = None
# adapt_hist_equalization = None
# ext_low_c = None
# ext_high_c = None
# ext_selem_micron_c = None
# ext_min_area_c = None
# ext_max_length_c = None
# max_straight_sq_dist_c = None
# max_weighted_length_c = None
# scale_range = None
# scale_step = None
# alpha = None
# beta = None
# frangi_c = None
# diff_mode = None
# bright_on_dark = None
# method = None
# hist_min_length = None
# hist_step_length = None
# number_of_bins_length = None
# hist_bins_borders_length = None
# hist_min_curvature = None
# hist_step_curvature = None
# number_of_bins_curvature = None
# hist_bins_borders_curvature = None
# with_plot = None
# interactive = None
# in_parallel = None