"README.md" did not exist on "06362cbe7adb869acd093669817c8f0e91abe36b"
Newer
Older
# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.
# Time profiling:
# python -m cProfile -o runtime/profiling.log -s name main.py
# Memory profiling:
# python -m memory_profiler main.py
# or
# mprof run main.py
# mprof plot
import brick.component.connection as cn_
import brick.component.extension as xt_
import brick.component.soma as sm_
import brick.general.feedback as fb_
# import brick.processing.image_verification as iv_
from sklgraph.skl_fgraph import skl_graph_t
from sklgraph.skl_map import skl_map_t
import brick.processing.graph_extraction as ge_
import brick.processing.best_fit_ellipsoid as bf_
import matplotlib.pyplot as pl_
import numpy as np_
import skimage.io as io_
# from skimage.segmentation import relabel_sequential
import skimage.morphology as mp_
import skimage.measure as ms_
import skimage.graph as gr_
import networkx as nx_
import math as mt_
import pandas as pd_
import os as os_
import sys as sy_
import time as tm_
import scipy.stats as st_
# The two imports below are assumed module paths for project helpers used further down
# (in_.FindVoxelDimensionInMicron/ToPixel/ToMicron and po_.MaximumIntensityProjectionZ):
import brick.processing.input as in_
import brick.processing.plot as po_
if len(sy_.argv) < 2:
    print("Missing parameter file argument")
    sy_.exit(1)
if not (os_.path.isfile(sy_.argv[1]) and os_.access(sy_.argv[1], os_.R_OK)):
    print("Wrong parameter file path or parameter file unreadable")
    sy_.exit(1)
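# Usage: python main.py <parameter_file.py>  (e.g., the stable search_parameters.py mentioned
# below; the parameter file is executed, not parsed, so it must be trusted Python code)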
data_path = None
channel = None
size_voxel_in_micron = None
soma_low_c = None
soma_high_c = None
soma_selem_micron_c = None
soma_min_area_c = None
ext_low_c = None
ext_high_c = None
ext_selem_micron_c = None
ext_min_area_c = None
max_straight_sq_dist_c = None
max_weighted_length_c = None
scale_range = None
scale_step = None
alpha = None
beta = None
frangi_c = None
bright_on_dark = None
method = None
with_plot = None
in_parallel = None
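# The parameter file given on the command line is itself Python code assigning the variables
# declared above; exec(...) below runs it in this namespace to fill them in.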
exec(open(sy_.argv[1]).read()) # Only with search_parameters.py (stable version)
soma_t = sm_.soma_t
extension_t = xt_.extension_t
print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
start_time = tm_.time()
# Find the dimension of the image voxels in micron
size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)
# Load the image, then check it - simple version without user interface
image = io_.imread(data_path)  # assumed loader; the original reading call is not shown in this capture
image = in_.ImageVerification(image, channel)
# iv_.image_verification(image, channel) # -> PySide2 user interface # TODO: must return the modified image!
# /!\ conflicts between some versions of PySide2 and Python3
image = image[:, 800:, 800:] # 512 # 562 # Just for development
#
img_shape = image.shape
print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")
# Intensity relative normalization (between 0 and 1).
image_for_soma = in_.IntensityNormalizedImage(image)
image_for_ext = in_.IntensityNormalizedImage(image)
print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")
axes = {}
som_nfo = {}  # soma maps (initialization was missing; filled in below before first use)
ext_nfo = {}  # extension maps (idem)
# Change the soma parameters from micron to pixel dimensions
soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
# Create the maps for enhancing and detecting the soma
som_nfo["map"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
som_nfo["map"], som_lmp = soma_t.FilteredMap(som_nfo["map"], soma_min_area_c)
som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)
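# ms_.label assigns a distinct positive integer to each connected component of the binary map;
# with return_num=True it also returns the number of components found.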
# relabel_sequential would be faster than label here, but it is currently buggy in this context.
# som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
# n_somas = som_nfo["lmp"].max()
som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])
somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))
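# som_nfo["influence_map"] labels each voxel with the uid of its closest soma, and
# som_nfo["dist_to_closest"] stores the corresponding distance; both drive the candidate
# connection search below.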
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
    fb_.PlotSomas(somas, som_nfo, axes)
# Change the extensions parameters from micron to pixel dimensions
ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
if ext_selem_micron_c == 0:
    ext_selem_pixel_c = None
else:
    ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))
scale_range_pixel = []
for value in scale_range:
    value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
    scale_range_pixel.append(value_in_pixel)
scale_range = tuple(scale_range_pixel)
scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)
alpha = in_.ToPixel(alpha, size_voxel_in_micron, decimals=1)
beta = in_.ToPixel(beta, size_voxel_in_micron, decimals=1)
frangi_c = in_.ToPixel(frangi_c, size_voxel_in_micron)
# Perform Frangi vesselness enhancement (Python implementation, or the faster C one)
enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
    image_for_ext,
    scale_range,
    scale_step,
    alpha,
    beta,
    frangi_c,
    bright_on_dark,
    method,
    in_parallel=in_parallel,
)
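# For reference: the Frangi filter scores tubular structures from the Hessian eigenvalues at
# each scale in scale_range; alpha penalizes plate-like shapes, beta blob-like shapes, and the
# frangi_c constant suppresses low-contrast background.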
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
# Creation of the enhanced maps
ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)  # thresholding
ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)  # min-area filtering
ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])  # skeletonization
ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)
# relabel_sequential would be faster than label here, but it changes the number of detected extensions!
# ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
# n_extensions = ext_nfo["lmp"].max()
extensions = tuple(
    extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
    for uid in range(1, n_extensions + 1))
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
    fb_.PlotExtensions(extensions, ext_nfo, img_shape)
dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
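# The cost map presumably makes paths cheap through detected somas/extensions and expensive
# through background, so that the shortest paths computed below follow the image content.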
# -- Soma-Extension
print("\n--- Soma <-> Extension")
# Change the extensions parameters from micron to pixel dimensions
max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)
# Find the candidate extensions for connection to the somas
candidate_conn_nfo = cn_.CandidateConnections(
    somas,
    som_nfo["influence_map"],
    som_nfo["dist_to_closest"],
    extensions,
    max_straight_sq_dist_c,
)
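# Each candidate pairs an extension end point with the soma whose influence zone contains it,
# keeping only pairs whose straight-line squared distance is below max_straight_sq_dist_c.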
# For each candidate, check validity based on the shortest path and the maximum allowed weighted length
for ep_idx, soma, extension, end_point in candidate_conn_nfo:
    print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
    # Reconstructed call: the capture only kept its keyword argument; the helper name and
    # positional arguments are assumptions
    path, length = cn_.ShortestPathFromToN(end_point, dijkstra_costs, soma, max_straight_sq_dist=max_straight_sq_dist_c)
    # length == len(path) - 2 when the connection can be made; otherwise length = 0 and len(path) = 0
    if length <= max_weighted_length_c:
        cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
        print(": Made")
    else:
        print("")
# soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
# Update the soma maps with new Ext-Ext connections
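# Validating a connection extends a soma, which enlarges its influence zone and can make further
# extension-to-extension connections eligible: hence the fixed-point loop below, which stops
# after a full pass that validates no new connection.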
should_look_for_connections = True
while should_look_for_connections:
    som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
    som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(
        som_nfo["soma_w_ext_lmp"]
    )
    # Find the candidate extensions for connection to the primary extensions
    candidate_conn_nfo = cn_.CandidateConnections(
        somas,
        som_nfo["influence_map"],
        som_nfo["dist_to_closest"],
        extensions,
        max_straight_sq_dist_c,
    )
    # Reset the flag (was missing: without it, the loop never terminates)
    should_look_for_connections = False
    # For each candidate, check validity based on the shortest path and the maximum allowed weighted length
    for ep_idx, soma, extension, end_point in candidate_conn_nfo:
        if extension.is_unconnected:
            print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
            # Reconstructed call, as above
            path, length = cn_.ShortestPathFromToN(end_point, dijkstra_costs, soma, max_straight_sq_dist=max_straight_sq_dist_c)
            if length <= max_weighted_length_c:
                tgt_extension = extension_t.ExtensionContainingSite(extensions, path[-1])
                cn_.ValidateConnection(tgt_extension, extension, end_point, path, dijkstra_costs)
                should_look_for_connections = True
                print(": Made")
            else:
                print("")
# Create an extension map containing all the extensions + connections, without the somas.
ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp'] - som_nfo['lmp']
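# In lmp_soma, each remaining extension pixel keeps the label of the soma it is connected to,
# which is what the per-soma graph extraction below relies on (ext_nfo['lmp_soma'] == soma.uid).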
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
# -- Summary
print("\n")
for soma in somas:
    print(soma)
# snapshot = tr_.take_snapshot()
# top_file_stats = snapshot.statistics('lineno')
# print("Memory Profiling: Top 10 FILES")
# for stat in top_file_stats[:10]:
# print(stat)
# top_block_stats = snapshot.statistics('traceback')
# top_block_stats = top_block_stats[0]
# print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
# for line in top_block_stats.traceback.format():
# print(line)
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
if with_plot:
    pl_.show()
po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
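# Maximum intensity projection along Z: flattens the labeled 3-D volume to a 2-D overview image.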
# po_.MaximumIntensityProjectionZ(ext_nfo['lmp_soma'])
# --- Extract all the extensions of all somas as a graph
print('\n--- Graph extraction')
print('\n- Graph roots')
# Create the graphs
somas_features_dict = {} # Dict{soma 1: [features], soma 2: [features], ...}
columns = [
    "Coef_V_soma__V_convex_hull",
    "Coef_axes_ellips_b__a",
    "Coef_axes_ellips_c__a",
    "N_nodes",
    "N_ext",
    "N_primary_ext",
    "N_sec_ext",
    "highest_degree",
    "min_degree",
    "mean_degree",
    "median_degree",
    "max_degree",
    "std_degree",
    "ext_lengths",
    "total_ext_length",
    "min_length",
    "mean_length",
    "median_length",
    "max_length",
    "std_length",
    "entropy_length",
    # "hist_length",
    "ext_lengths_P",
    "total_ext_length_P",
    "min_length_P",
    "mean_length_P",
    "median_length_P",
    "max_length_P",
    "std_length_P",
    "entropy_length_P",
    # "hist_length_P",
    "ext_lengths_S",
    "total_ext_length_S",
    "min_length_S",
    "mean_length_S",
    "median_length_S",
    "max_length_S",
    "std_length_S",
    "entropy_length_S",
    # "hist_length_S",
]
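# One row per soma is appended to somas_features_dict with the values matching these column
# names, then turned into features_df (a pandas DataFrame) at the end of the script.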
for soma in somas:
    ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid, store_widths=True, skeletonize=False)
    # do_post_thinning=True would additionally remove the pixels whose deletion does not break
    # connectivity (FineMap in FromShapeMap)
    soma.skl_graph = skl_graph_t.FromSkeleton(ext_map)
    # soma.skl_graph.Plot(mode=plot_mode_e.SKL_Curve, w_directions=True, should_block=False)
    # soma.skl_graph.Plot(should_block=True)
    # if with_plot:
    #     pl_.show()
    # --- Find the roots of the {ext+conn} graphs.
    # Roots are the nodes of degree 1 that are to be linked to the soma
    print(f"\nSoma {soma.uid}")
    soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)
    # soma.graph_roots = ge_.FindGraphsRootWithEdges(soma, ext_nfo)
    print(soma.graph_roots, '\nn = ', len(soma.graph_roots))
    # Add a node "soma" and link it to the root nodes
    soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
    # soma.skl_graph.add_node(soma_node, soma=True, soma_uid=soma.uid, centroid=soma.centroid, sites=soma.sites)
    soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)
    for node in soma.graph_roots.values():
        soma.skl_graph.add_edge(node, soma_node, root=True)
        # print(node, "<->", soma_node, ': Done')
    # if with_plot:
    #     pl_.show(block=True)
    #
    elapsed_time = tm_.gmtime(tm_.time() - start_time)
    # --- Extract features
    # features_df = fe_.ExtractFeaturesInDF()
    # -- Soma features
    volume_pixel_micron = round(np_.prod(size_voxel_in_micron), 4)  # volume of one voxel, in cubic microns
    soma.volume_soma_micron = volume_pixel_micron * len(soma.sites[0])
    volume_convex_hull = volume_pixel_micron * bf_.GetConvexHull3D(soma.sites)[1]
    Coef_V_soma__V_convex_hull = soma.volume_soma_micron / volume_convex_hull  ## TODO keep as prm
    print(f"Volume soma = {soma.volume_soma_micron}\n"
          f"Volume soma / Volume Convex Hull = {Coef_V_soma__V_convex_hull}"
          )
    soma.axes_ellipsoid = bf_.FindBestFittingEllipsoid3D(soma)[2]
    Coef_axes_ellips_b__a = soma.axes_ellipsoid[1] / soma.axes_ellipsoid[0]  ## TODO keep as prm (2)
    Coef_axes_ellips_c__a = soma.axes_ellipsoid[2] / soma.axes_ellipsoid[0]
    # -- Extension features
    # Graph features  # TODO keep as prms (4)
    N_nodes = soma.skl_graph.n_nodes  # number of nodes
    N_ext = soma.skl_graph.n_edges - len(soma.graph_roots)  # number of edges, excluding those constructed from the soma node to the roots
    N_primary_ext = len(soma.graph_roots)  # number of primary extensions, i.e. extensions linked to the soma
    N_sec_ext = N_ext - N_primary_ext  # number of secondary extensions, i.e. extensions not linked to the soma
    print(
        f"N nodes = {N_nodes}\n"
        f"N edges = {N_ext}\n"
        f"N primary extensions = {N_primary_ext}\n"
        f"N secondary extensions = {N_sec_ext}\n"
    )
    # Reconstructed guard: the captured source only retained its "else:" branch; the all-None
    # defaults there imply that the computations below are conditional on having extensions at all
    if N_primary_ext > 0:
        ext_lengths = [in_.ToMicron(length, volume_pixel_micron) for length in soma.skl_graph.edge_lengths]
        total_ext_length = in_.ToMicron(soma.skl_graph.length, volume_pixel_micron)
        # min, mean, median, max and standard deviation of ALL the extensions
        min_length = in_.ToMicron(soma.skl_graph.min_length, volume_pixel_micron)
        mean_length = in_.ToMicron(soma.skl_graph.mean_length, volume_pixel_micron)
        median_length = in_.ToMicron(soma.skl_graph.median_length, volume_pixel_micron)
        max_length = in_.ToMicron(soma.skl_graph.max_length, volume_pixel_micron)
        std_length = np_.std(ext_lengths)
        entropy_length = st_.entropy(ext_lengths)
        # Node-degree statistics, moved before the print that reports them (the captured source
        # computed them afterwards, which would raise a NameError)
        if N_sec_ext > 0:
            highest_degree = soma.skl_graph.max_degree  # highest node degree, soma excluded
            if highest_degree == 2:
                highest_degree = 1
            highest_degree_w_node = soma.skl_graph.highest_degree_w_nodes(soma)  # idem, with the node coordinates
            # min, mean, median, max and standard deviation of the degrees of non-leaf, non-root nodes
            min_degree = soma.skl_graph.min_degree_except_leaves_and_roots
            mean_degree = soma.skl_graph.mean_degree_except_leaves_and_roots
            median_degree = soma.skl_graph.median_degree_except_leaves_and_roots
            max_degree = soma.skl_graph.max_degree_except_leaves_and_roots  # typo fixed: "..._an_roots"
            std_degree = soma.skl_graph.std_degree_except_leaves_and_roots
        else:
            # Without secondary extensions, every node but the soma is a root or a leaf
            highest_degree = 1
            highest_degree_w_node = soma.skl_graph.highest_degree_w_nodes(soma)
            min_degree = 1
            mean_degree = 1
            median_degree = 1
            max_degree = 1
            std_degree = 0
        print(
            f"NODES DEGREES\n"
            f"Highest degree (except soma) = {highest_degree}/{highest_degree_w_node}\n"
            f"Min/Mean/Median/Max degree (except soma & leaves) = {min_degree} / {mean_degree} / {median_degree} / {max_degree}\n"
            f"Standard deviation (except soma & leaves) = {std_degree}\n\n"
            #
            f"ALL EXTENSIONS\n    Total Length = {total_ext_length} <- {ext_lengths}\n"
            f"    Min/Mean/Median/Max Length = {min_length} / {mean_length} / {median_length} / {max_length}\n"
            f"    Standard Deviation = {std_length} / Entropy = {entropy_length}")
        # PRIMARY extensions
        ext_lengths_P = [in_.ToMicron(length, volume_pixel_micron) for length in soma.skl_graph.primary_edge_lengths(soma)]
        total_ext_length_P = sum(ext_lengths_P)  # was missing, although printed below
        # min, mean, median, max and standard deviation of the PRIMARY extensions
        min_length_P = min(ext_lengths_P)
        mean_length_P = np_.mean(ext_lengths_P)
        median_length_P = np_.median(ext_lengths_P)
        max_length_P = max(ext_lengths_P)
        std_length_P = np_.std(ext_lengths_P)
        entropy_length_P = st_.entropy(ext_lengths_P)
        print(
            f"PRIMARY EXTENSIONS\n    Total Length = {total_ext_length_P}\n"
            f"    Min/Mean/Median/Max Length = {min_length_P} / {mean_length_P} / {median_length_P} / {max_length_P}\n"
            f"    Standard Deviation = {std_length_P} / Entropy = {entropy_length_P}")
        if N_sec_ext > 0:
            # SECONDARY extensions
            ext_lengths_S = [in_.ToMicron(length, volume_pixel_micron) for length in soma.skl_graph.secondary_edge_lengths(soma)]
            total_ext_length_S = sum(ext_lengths_S)
            # min, mean, median, max and standard deviation of the SECONDARY extensions
            min_length_S = min(ext_lengths_S)
            mean_length_S = np_.mean(ext_lengths_S)
            median_length_S = np_.median(ext_lengths_S)
            max_length_S = max(ext_lengths_S)
            std_length_S = np_.std(ext_lengths_S)
            entropy_length_S = st_.entropy(ext_lengths_S)
            print(
                f"SECONDARY EXTENSIONS\n    Total Length = {total_ext_length_S}\n"
                f"    Min/Mean/Median/Max Length = {min_length_S} / {mean_length_S} / {median_length_S} / {max_length_S}\n"
                f"    Standard Deviation = {std_length_S} / Entropy = {entropy_length_S}")
        else:
            ext_lengths_S = None
            total_ext_length_S = 0
            min_length_S = None
            mean_length_S = None
            median_length_S = None
            max_length_S = None
            std_length_S = None
            entropy_length_S = None
    else:
        ext_lengths = None
        total_ext_length = 0
        min_length = None
        mean_length = None
        median_length = None
        max_length = None
        std_length = None
        entropy_length = None
        ext_lengths_P = None
        total_ext_length_P = 0
        min_length_P = None
        mean_length_P = None
        median_length_P = None
        max_length_P = None
        std_length_P = None
        entropy_length_P = None
        ext_lengths_S = None
        total_ext_length_S = 0
        min_length_S = None
        mean_length_S = None
        median_length_S = None
        max_length_S = None
        std_length_S = None
        entropy_length_S = None
        # Degree defaults (added: the captured branch left these unset, which would break the
        # feature row below)
        highest_degree = None
        min_degree = None
        mean_degree = None
        median_degree = None
        max_degree = None
        std_degree = None
    somas_features_dict[f"soma {soma.uid}"] = [
        Coef_V_soma__V_convex_hull,
        Coef_axes_ellips_b__a,
        Coef_axes_ellips_c__a,
        N_nodes,
        N_ext,
        N_primary_ext,
        N_sec_ext,
        highest_degree,
        min_degree,
        mean_degree,
        median_degree,
        max_degree,
        std_degree,
        ext_lengths,
        total_ext_length,
        min_length,
        mean_length,
        median_length,
        max_length,
        std_length,
        entropy_length,
        # hist_length,
        ext_lengths_P,
        total_ext_length_P,
        min_length_P,
        mean_length_P,
        median_length_P,
        max_length_P,
        std_length_P,
        entropy_length_P,
        # hist_length_P,
        ext_lengths_S,
        total_ext_length_S,
        min_length_S,
        mean_length_S,
        median_length_S,
        max_length_S,
        std_length_S,
        entropy_length_S,
        # hist_length_S,
    ]
features_df = pd_.DataFrame.from_dict(somas_features_dict, orient="index", columns=columns)
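# The feature table can then be persisted, e.g. features_df.to_csv("runtime/features.csv")
# (illustrative path; the capture does not show what is done with features_df afterwards).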
# TODO: Frangi-based features: width of the extensions, curvature of the extensions
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")