# Copyright CNRS/Inria/UNS
# Contributor(s): Eric Debreuve (since 2019), Morgane Nadal (2020)
#
# eric.debreuve@cnrs.fr
#
# This software is governed by the CeCILL  license under French law and
# abiding by the rules of distribution of free software.  You can  use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and  rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty  and the software's author,  the holder of the
# economic rights,  and the successive licensors  have only  limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading,  using,  modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean  that it is complicated to manipulate,  and  that  also
# therefore means  that it is reserved for developers  and  experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and,  more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.

# Time profiling:
#     python -m cProfile -o runtime/profiling.log -s name main.py
# Memory profiling:
#     python -m memory_profiler main.py
#     or
#     mprof run main.py
#     mprof plot
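#     (Note: the line-by-line memory_profiler report only covers functions
#     decorated with @profile, whereas mprof samples whole-process memory
#     over time.)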

import brick.component.connection as cn_
import brick.component.extension as xt_
import brick.component.soma as sm_
import brick.general.feedback as fb_
import brick.processing.input as in_
import brick.processing.plot as po_
import brick.processing.map_labeling as ml_
# import brick.processing.image_verification as iv_
from sklgraph.skl_fgraph import skl_graph_t
from sklgraph.skl_map import skl_map_t
import brick.processing.graph_feat_extraction as ge_
import os as os_
import sys as sy_
import time as tm_
import pathlib
import psutil as mu_  # currently unused; presumably kept for memory monitoring

import matplotlib.pyplot as pl_
import numpy as np_
from skimage.segmentation import relabel_sequential
import skimage.morphology as mp_
import skimage.measure as ms_
import skimage.exposure as ex_
import networkx as nx_
import imageio as io_  # assumed provider of io_.volread used below (this import was missing)
import pandas as pd_  # needed by the commented-out NutriMorph() version at the bottom

print(sy_.argv, len(sy_.argv))

if len(sy_.argv) < 2:
    print("Missing parameter file argument")
    print("Usage: python nutrimorph.py <parameter-file>")
    sy_.exit(1)  # non-zero exit status to signal the error
if not (os_.path.isfile(sy_.argv[1]) and os_.access(sy_.argv[1], os_.R_OK)):
    print("Wrong parameter file path or parameter file unreadable")
    sy_.exit(1)

data_path = None
channel = None
size_voxel_in_micron = None
soma_low_c = None
soma_high_c = None
soma_selem_micron_c = None
soma_min_area_c = None
soma_max_area_c = None
adapt_hist_equalization = None
ext_low_c = None
ext_high_c = None
ext_selem_micron_c = None
ext_min_area_c = None
ext_max_length_c = None
max_straight_sq_dist_c = None
max_weighted_length_c = None
scale_range = None
scale_step = None
alpha = None
beta = None
frangi_c = None
bright_on_dark = None
method = None
hist_min_length = None
hist_step_length = None
number_of_bins_length = None
hist_bins_borders_length = None
hist_min_curvature = None
hist_step_curvature = None
number_of_bins_curvature = None
hist_bins_borders_curvature = None
# The following are also expected from the parameter file but had no default here
with_plot = None
diff_mode = None
in_parallel = None
exec(open(sy_.argv[1]).read())  # Only with search_parameters.py (stable version)
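# A parameter file is itself a Python script whose assignments override the
# defaults above. Minimal hypothetical sketch (names taken from the defaults,
# values invented for illustration):
#
#     data_path = "path/to/image.tif"
#     channel = "G"
#     size_voxel_in_micron = None  # None => read from the image metadata
#     soma_low_c, soma_high_c = 0.15, 0.75
#     soma_selem_micron_c = 0.5
#     with_plot, in_parallel = False, False
#     # ... and so on for the remaining names initialized to None above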
soma_t = sm_.soma_t
extension_t = xt_.extension_t

print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
start_time = tm_.time()

# TODO: issue proper warnings with the warnings module
# --- Images

# Find the name of the file, which corresponds to the tested biological condition number x
name_file = os_.path.basename(data_path)
print("FILE: ", name_file)
name_file = name_file.replace(".tif", "")

name_dir = os_.path.dirname(data_path)
print("DIR: ", name_dir)

# Find the dimension of the image voxels in micron
# TODO: make this more general; currently too specific to one microscope's metadata
size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)

# Read the image
image = io_.volread(data_path)

# Image size verification - simple version without user interface
image = in_.ImageVerification(image, channel)

# iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
# /!\ conflicts between some versions of PySide2 and Python3

img_shape = image.shape

#
print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")

# Intensity relative normalization (between 0 and 1).
# Image for soma normalized for hysteresis
image_for_soma = in_.IntensityNormalizedImage(image)
# Image for extensions not normalized for frangi enhancement
image_for_ext = image.copy()

print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")

# --- Initialization
som_nfo = {}
ext_nfo = {}
axes = {}

#
# --- Somas
print("\n--- Soma Detection")

# Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
soma_max_area_c = in_.ToPixel(soma_max_area_c, size_voxel_in_micron, dimension=(0, 1))
soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
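# For instance, with a hypothetical XY voxel size of 0.4 micron, a 2-micron
# structuring-element radius maps to a 5-pixel disk (assuming in_.ToPixel rounds
# the micron / voxel-size ratio to an integer).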

# - Create the maps for enhancing and detecting the soma
# Hysteresis thresholding and closing/opening
som_nfo["map_small"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
# Deletion of too small elements
som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(som_nfo["map_small"], soma_min_area_c)
# Deletion of too-big elements (probably aggregates of cells).
# Done separately from the small-element deletion so that extensions are not detected where over-sized somas were just removed
som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(som_nfo["map_small"], som_lmp_small, soma_max_area_c)

# Labelling of somas, find the number of somas
# som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)

# Use relabel instead of label to optimize the algorithm. /!\ But BUG.
som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
n_somas = som_nfo["lmp"].max()
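# (relabel_sequential remaps labels to consecutive integers, e.g. labels {0, 3, 7}
# become {0, 1, 2}, so the maximum label equals the soma count.)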

# Keep only the soma labeled id_soma: zero out every other label, then relabel
id_soma = 1
for label in range(n_somas + 1):
    if label != id_soma:
        print(label)
        som_nfo["lmp"][som_nfo["lmp"] == label] = 0
som_nfo["lmp"], n_somas = ms_.label(som_nfo["lmp"], return_num=True)

# po_.MaximumIntensityProjectionZ(som_nfo["lmp"])

# Create the distance and influence maps used below for soma-extension and extension-extension connections
som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])

# po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])

# Create the tuple somas, containing all the objects 'soma'
somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))

print(f"    n = {n_somas}")

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomas(somas, som_nfo, axes)

#
# -- Extensions
print("\n--- Extension Detection")

# Zero out the pixels belonging to the somas.
# The map that still contains the over-sized somas ("map_small") is used, so that cell aggregates detected as one big soma cannot give rise to extensions there
del_soma = (som_nfo["map_small"] > 0).nonzero()
image_for_ext[del_soma] = 0

# Change the extension parameters from micron to pixel dimensions
ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
ext_max_length_c = in_.ToPixel(ext_max_length_c, size_voxel_in_micron, dimension=(0,))

if ext_selem_micron_c == 0:
    ext_selem_pixel_c = None
else:
    ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))

# scale_range_pixel = []
# #
# for value in scale_range:
#     value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
#     scale_range_pixel.append(value_in_pixel)
# scale_range = tuple(scale_range_pixel)
#
# scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)

# - Perform Frangi feature enhancement (via the Python or, faster, C implementation)
enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
    image_for_ext,
    scale_range,
    scale_step,
    alpha,
    beta,
    frangi_c,
    bright_on_dark,
    method,
    diff_mode=diff_mode,
    in_parallel=in_parallel)
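# (Background on the Frangi vesselness parameters, for reference: alpha tunes the
# sensitivity to plate-like vs line-like structures, beta to blob-like structures,
# and frangi_c presumably plays the role of Frangi's constant c, i.e. sensitivity
# to overall structureness/background noise. scale_range and scale_step set the
# Gaussian scales at which tubular structures are sought.)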

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")

# po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")

# - Creation of the enhanced maps
# enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment

# Enhance the contrast of the Frangi output
if adapt_hist_equalization:
    # necessary but not sufficient: histogram equalization (adaptive)
    enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
else:
    # necessary but not sufficient: histogram equalization (global)
    enhanced_ext = ex_.equalize_hist(enhanced_ext)

# po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")

# Stretch the intensities to the range [0, 255] (again necessary but not sufficient)
enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
# Hysteresis thresholding
ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
# Deletion of too small elements
ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
# Creation of the extensions skeleton
ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
# ext_nfo["map"] = extension_t.FilteredSkeletonMap(ext_nfo["coarse_map"], ext_max_length_c)
# po_.MaximumIntensityProjectionZ(ext_nfo["map"])
# Deletion of extensions found within the somas
ext_nfo["map"][som_nfo["map"] > 0] = 0
# Labelling of extensions and number of extensions determined
ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)

# Use relabel instead of label to optimize the algorithm. BUT PROBLEM WITH THE NUMBER OF EXTENSIONS DETECTED !
# ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
# n_extensions = ext_nfo["lmp"].max()

# Create the tuple extensions, containing all the objects 'extension'
extensions = tuple(
    extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
    for uid in range(1, n_extensions + 1))

print(f"    n = {n_extensions}")

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotExtensions(extensions, ext_nfo, img_shape)

# -- Preparation for Connections
# Calculate the costs of each pixel on the image
dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
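# (Assumption for the reader: DijkstraCosts presumably assigns a traversal cost to
# each voxel, lower on bright/structure-like voxels, so that the minimum-cost paths
# computed below follow the actual glial processes.)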

# -- Soma-Extension
print("\n--- Soma <-> Extension")

# Change the connection parameters from micron to pixel dimensions
max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)

# Find the candidate extensions, with their endpoints, for connection to the somas
candidate_conn_nfo = cn_.CandidateConnections(
    somas,
    som_nfo["influence_map"],
    som_nfo["dist_to_closest"],
    extensions,
    max_straight_sq_dist_c,
)

# For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
for ep_idx, soma, extension, end_point in candidate_conn_nfo:
    # Only try to connect if the extension is not already connected
    if extension.is_unconnected:
        print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
        # Use of Dijkstra shortest weighted path
        path, length = cn_.ShortestPathFromToN(
            start_time,
            end_point,
            dijkstra_costs,
            soma.ContourPointsCloseTo,
            max_straight_sq_dist=max_straight_sq_dist_c,
        )

        # Keep the connection only if its weighted length does not exceed the allowed maximum
        if length <= max_weighted_length_c:
            cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
            print(": Made")
        else:
            print("")

# for soma in somas:
#     soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)

fb_.PrintConnectedExtensions(extensions)

#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "all")

# -- Extension-Extension
print("\n--- Extension <-> Extension")

should_look_for_connections = True

# Iteratively update the soma maps with the new extension-extension connections
while should_look_for_connections:
    # Update the final somas + extensions map by adding the new extensions and their connection paths
    som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
    # Update the influence map
    som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["soma_w_ext_lmp"])

    # Find the candidate extensions for connection to the primary extensions
    candidate_conn_nfo = cn_.CandidateConnections(
        somas,
        som_nfo["influence_map"],
        som_nfo["dist_to_closest"],
        extensions,
        max_straight_sq_dist_c,
    )
    should_look_for_connections = False

    # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
    for ep_idx, soma, extension, end_point in candidate_conn_nfo:
        # Only try to connect if the extension is not already connected
        if extension.is_unconnected:
            print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
            # Use of the Dijkstra shortest weighted path
            path, length = cn_.ShortestPathFromToN(
                start_time,
                end_point,
                dijkstra_costs,
                soma.ExtensionPointsCloseTo,
                max_straight_sq_dist=max_straight_sq_dist_c,
            )

            # Keep the connection only if its weighted length does not exceed the allowed maximum
            if length <= max_weighted_length_c:
                # Verify that the last element of the connection path is contained in the extension to be connected.
                # If so, ExtensionContainingSite returns the extension containing the path's last site.
                tgt_extension = extension_t.ExtensionContainingSite(extensions, path[-1])
                # Validate the connection: update the glial component objects and store more info in their fields
                cn_.ValidateConnection(tgt_extension, extension, end_point, path, dijkstra_costs)

                should_look_for_connections = True

                print(": Made")
            else:
                print("")

# Create an extension map containing all the extensions and their connections, without the somas.
# Used below for the graph extraction
ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp'] - som_nfo['lmp']
#
fb_.PrintConnectedExtensions(extensions)
#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")

# -- Summary
print("\n")
for soma in somas:
    print(soma)

# snapshot = tr_.take_snapshot()
# top_file_stats = snapshot.statistics('lineno')
# print("Memory Profiling: Top 10 FILES")
# for stat in top_file_stats[:10]:
#     print(stat)
# top_block_stats = snapshot.statistics('traceback')
# top_block_stats = top_block_stats[0]
# print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
# for line in top_block_stats.traceback.format():
#     print(line)

elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")

if with_plot:
    pl_.show()

po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
# , output_image_file_name=f"D:\\MorganeNadal\\Results\\Images\\{name_file}.png")

# --- Extract all the extensions of all somas as a graph
print('\n--- Graph extraction')

# Create the graphs with the SKLGraph module (available on Eric Debreuve's GitLab)
for soma in somas:
    print(f" Soma {soma.uid}", end="")
    # Create the SKLGraph skeletonized map
    ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid,
                                     store_widths=True,
                                     skeletonize=False,
                                     do_post_thinning=False)
    # (do_post_thinning=True would instead remove the pixels whose deletion does not break connectivity)

    # Create the graph from the SKLGraph skeletonized map
    soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, size_voxel=size_voxel_in_micron)

    # --- Find the root of the {ext+conn} graphs.
    # Roots are the nodes of degree 1 that are to be linked to the soma
    soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)

    # Add a node "soma" and link it to the root nodes
    soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
    soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)

    for node in soma.graph_roots.values():
        soma.skl_graph.add_edge(node, soma_node, root=True)

    # nx_.draw_networkx(soma.skl_graph)
    # pl_.show(block=True)
    # pl_.close()

    print(": Done")

elapsed_time = tm_.gmtime(tm_.time() - start_time)

# --- Extract features
print('\n--- Features Extraction\n')

# Parameters
if hist_bins_borders_length is None:
    number_of_bins_length = int(number_of_bins_length)
    bins_length = np_.linspace(hist_min_length, hist_min_length + hist_step_length * number_of_bins_length,
                               num=number_of_bins_length)
    bins_length[-1] = np_.inf
else:
    bins_length = np_.array(hist_bins_borders_length)
    bins_length[-1] = np_.inf
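# For illustration (invented values): hist_min_length=0, hist_step_length=5 and
# number_of_bins_length=4 give np_.linspace(0, 20, num=4) = [0, 6.67, 13.33, 20],
# whose last edge is then replaced by +inf. Note the resulting spacing is
# step * n / (n - 1) rather than exactly hist_step_length.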

if hist_bins_borders_curvature is None:
    number_of_bins_curvature = int(number_of_bins_curvature)
    bins_curvature = np_.linspace(hist_min_curvature,
                                  hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
                                  num=number_of_bins_curvature)
    bins_curvature[-1] = np_.inf
else:
    bins_curvature = np_.array(hist_bins_borders_curvature)
    bins_curvature[-1] = np_.inf

# Pandas dataframe creation with all the measured features
features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
                                      ext_scales)

# Save the pandas DataFrame as a .csv next to the input file
features_df.to_csv(os_.path.join(name_dir, f"{name_file}.csv"))
#
elapsed_time = tm_.gmtime(tm_.time() - start_time)
print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")


#########----------- Executable version with files AND directories ------------###########

# def NutriMorph(data_path: str,
#         channel=channel,
#         size_voxel_in_micron=size_voxel_in_micron,
#         soma_low_c=soma_low_c,
#         soma_high_c=soma_high_c,
#         soma_selem_micron_c=soma_selem_micron_c,
#         soma_min_area_c=soma_min_area_c,
#         soma_max_area_c=soma_max_area_c,
#         adapt_hist_equalization=adapt_hist_equalization,
#         ext_low_c=ext_low_c,
#         ext_high_c=ext_high_c,
#         ext_selem_micron_c=ext_selem_micron_c,
#         ext_min_area_c=ext_min_area_c,
#         ext_max_length_c=ext_max_length_c,
#         max_straight_sq_dist_c=max_straight_sq_dist_c,
#         max_weighted_length_c=max_weighted_length_c,
#         scale_range=scale_range,
#         scale_step=scale_step,
#         alpha=alpha,
#         beta=beta,
#         frangi_c=frangi_c,
#         diff_mode=diff_mode,
#         bright_on_dark=bright_on_dark,
#         method=method,
#         hist_min_length=hist_min_length,
#         hist_step_length=hist_step_length,
#         number_of_bins_length=number_of_bins_length,
#         hist_bins_borders_length=hist_bins_borders_length,
#         hist_min_curvature=hist_min_curvature,
#         hist_step_curvature=hist_step_curvature,
#         number_of_bins_curvature=number_of_bins_curvature,
#         hist_bins_borders_curvature=hist_bins_borders_curvature,
#         with_plot=with_plot,
#         in_parallel=in_parallel) -> pd_.DataFrame:
#
#     soma_t = sm_.soma_t
#     extension_t = xt_.extension_t
#
#     print(f"STARTED: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
#     start_time = tm_.time()
#
#     # TODO add proper warning with warn module
#     # --- Images
#
#     # Find the name of the file, eq. to the biological tested condition number x
#     name_file = os_.path.basename(data_path)
#     print("FILE: ", name_file)
#     name_file = name_file.replace(".tif", "")
#
#     name_dir = os_.path.dirname(data_path)
#     print("DIR: ", name_dir)
#
#     # Find the dimension of the image voxels in micron
#     # TODO do something more general, here too specific to one microscope metadata
#     size_voxel_in_micron = in_.FindVoxelDimensionInMicron(data_path, size_voxel_in_micron=size_voxel_in_micron)
#
#     # Read the image
#     image = io_.volread(data_path)
#
#     # Image size verification - simple version without user interface
#     image = in_.ImageVerification(image, channel)
#
#     # iv_.image_verification(image, channel)  # -> PySide2 user interface  # TODO: must return the modified image!
#     # /!\ conflicts between some versions of PySide2 and Python3
#
#     image = image[:, 500:, 500:]
#     img_shape = image.shape
#
#     #
#     print(f"IMAGE: s.{img_shape} t.{image.dtype} m.{image.min()} M.{image.max()}")
#
#     # Intensity relative normalization (between 0 and 1).
#     # Image for soma normalized for hysteresis
#     image_for_soma = in_.IntensityNormalizedImage(image)
#     # Image for extensions not normalized for frangi enhancement
#     image_for_ext = image.copy()
#
#     print(f"NRM-IMG: t.{image_for_soma.dtype} m.{image_for_soma.min():.2f} M.{image_for_soma.max():.2f}")
#
#     # --- Initialization
#     som_nfo = {}
#     ext_nfo = {}
#     axes = {}
#
#     #
#     # --- Somas
#     print("\n--- Soma Detection")
#
#     # Change the soma parameters from micron to pixel dimensions, using the previously determined voxel size
#     soma_min_area_c = in_.ToPixel(soma_min_area_c, size_voxel_in_micron, dimension=(0, 1))
#     soma_max_area_c = in_.ToPixel(soma_max_area_c, size_voxel_in_micron, dimension=(0, 1))
#     soma_selem_c = mp_.disk(in_.ToPixel(soma_selem_micron_c, size_voxel_in_micron))
#
#     # - Create the maps for enhancing and detecting the soma
#     # Hysteresis thresholding and closing/opening
#     som_nfo["map_small"] = soma_t.Map(image_for_soma, soma_low_c, soma_high_c, soma_selem_c)
#     # Deletion of too small elements
#     som_nfo["map_small"], som_lmp_small = soma_t.DeleteSmallSomas(som_nfo["map_small"], soma_min_area_c)
#     # Deletion of too big elements (probably aggregation of cells)
#     # Separated from the deletion of small elements to avoid extensions to be detected where too big somas were just deleted
#     som_nfo["map"], som_lmp = soma_t.DeleteBigSomas(som_nfo["map_small"], som_lmp_small, soma_max_area_c)
#
#     # Labelling of somas, find the number of somas
#     # som_nfo["lmp"], n_somas = ms_.label(som_nfo["map"], return_num=True)
#
#     # Use relabel instead of label to optimize the algorithm. /!\ But BUG.
#     som_nfo["lmp"] = relabel_sequential(som_lmp)[0]
#     n_somas = som_nfo["lmp"].max()
#
#     # po_.MaximumIntensityProjectionZ(som_nfo["lmp"])
#
#     # Create the distance and influence maps used next for connexions between somas-extensions and extensions-extensions
#     som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["lmp"])
#
#     # po_.MaximumIntensityProjectionZ(som_nfo["influence_map"])
#
#     # Create the tuple somas, containing all the objects 'soma'
#     somas = tuple(soma_t().FromMap(som_nfo["lmp"], uid) for uid in range(1, n_somas + 1))
#
#     print(f"    n = {n_somas}")
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomas(somas, som_nfo, axes)
#
#     #
#     # -- Extensions
#     print("\n--- Extension Detection")
#
#     # Set to zeros the pixels belonging to the somas.
#     # Take into account the aggregation of cells detected as one big soma to avoid extension creation there
#     del_soma = (som_nfo["map_small"] > 0).nonzero()
#     image_for_ext[del_soma] = 0
#
#     # Change the extensions parameters from micron to pixel dimensions
#     ext_min_area_c = in_.ToPixel(ext_min_area_c, size_voxel_in_micron, dimension=(0, 1))
#     ext_max_length_c = in_.ToPixel(ext_max_length_c, size_voxel_in_micron, dimension=(0,))
#
#     if ext_selem_micron_c == 0:
#         ext_selem_pixel_c = None
#     else:
#         ext_selem_pixel_c = mp_.disk(in_.ToPixel(ext_selem_micron_c, size_voxel_in_micron))
#
#     # scale_range_pixel = []
#     # #
#     # for value in scale_range:
#     #     value_in_pixel = in_.ToPixel(value, size_voxel_in_micron, decimals=1)
#     #     scale_range_pixel.append(value_in_pixel)
#     # scale_range = tuple(scale_range_pixel)
#     #
#     # scale_step = in_.ToPixel(scale_step, size_voxel_in_micron, decimals=1)
#
#     # - Perform frangi feature enhancement (via python or c - faster - implementation)
#     enhanced_ext, ext_scales = extension_t.EnhancedForDetection(
#         image_for_ext,
#         scale_range,
#         scale_step,
#         alpha,
#         beta,
#         frangi_c,
#         bright_on_dark,
#         method,
#         diff_mode=diff_mode,
#         in_parallel=in_parallel)
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"Elapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}\n")
#
#     # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
#
#     # - Creation of the enhanced maps
#     # enhanced_ext = ex_.adjust_log(enhanced_ext, 1)  # Not necessary, log contrast adjustment
#
#     # Enhanced the contrast in frangi output
#     if adapt_hist_equalization:
#         # necessary but not sufficient, histogram equalisation (adaptative)
#         enhanced_ext = ex_.equalize_adapthist(enhanced_ext)
#     else:
#         # necessary but not sufficient, histogram equalisation (global)
#         enhanced_ext = ex_.equalize_hist(enhanced_ext)
#
#     # po_.MaximumIntensityProjectionZ(enhanced_ext, cmap="OrRd")
#
#     # Rescale the image by stretching the intensities between 0 and 255, necessary but not sufficient
#     enhanced_ext = ex_.rescale_intensity(enhanced_ext, out_range=(0, 255))
#     # Hysteresis thresholding
#     ext_nfo["coarse_map"] = extension_t.CoarseMap(enhanced_ext, ext_low_c, ext_high_c, ext_selem_pixel_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
#     # Deletion of too small elements
#     ext_nfo["coarse_map"], ext_lmp = extension_t.FilteredCoarseMap(ext_nfo["coarse_map"], ext_min_area_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["coarse_map"])
#     # Creation of the extensions skeleton
#     ext_nfo["map"] = extension_t.FineMapFromCoarseMap(ext_nfo["coarse_map"])
#     # TODO delete too long extensions
#     # ext_nfo["map"] = extension_t.FilteredSkeletonMap(ext_nfo["coarse_map"], ext_max_length_c)
#     # po_.MaximumIntensityProjectionZ(ext_nfo["map"])
#     # Deletion of extensions found within the somas
#     ext_nfo["map"][som_nfo["map"] > 0] = 0
#     # Labelling of extensions and number of extensions determined
#     ext_nfo["lmp"], n_extensions = ms_.label(ext_nfo["map"], return_num=True)
#
#     # Use relabel instead of label to optimize the algorithm. BUT PROBLEM WITH THE NUMBER OF EXTENSIONS DETECTED !
#     # ext_nfo["lmp"] = relabel_sequential(ext_lmp)[0]
#     # n_extensions = ext_nfo["lmp"].max()
#
#     # Create the tuple extensions, containing all the objects 'extension'
#     extensions = tuple(
#         extension_t().FromMap(ext_nfo["lmp"], ext_scales, uid)
#         for uid in range(1, n_extensions + 1))
#
#     print(f"    n = {n_extensions}")
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotExtensions(extensions, ext_nfo, img_shape)
#
#     # -- Preparation for Connections
#     # Calculate the costs of each pixel on the image
#     dijkstra_costs = in_.DijkstraCosts(image, som_nfo["map"], ext_nfo["map"])
#
#     # -- Soma-Extension
#     print("\n--- Soma <-> Extension")
#
#     # Change the extensions parameters from micron to pixel dimensions
#     max_straight_sq_dist_c = in_.ToPixel(max_straight_sq_dist_c, size_voxel_in_micron)
#     max_weighted_length_c = in_.ToPixel(max_weighted_length_c, size_voxel_in_micron)
#
#     # Find the candidate extensions, with their endpoints, for connexion to the somas
#     candidate_conn_nfo = cn_.CandidateConnections(
#         somas,
#         som_nfo["influence_map"],
#         som_nfo["dist_to_closest"],
#         extensions,
#         max_straight_sq_dist_c,
#     )
#
#     # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
#     for ep_idx, soma, extension, end_point in candidate_conn_nfo:
#         # Only try to connect if the extension is not already connected
#         if extension.is_unconnected:
#             print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
#             # Use of Dijkstra shortest weighted path
#             path, length = cn_.ShortestPathFromToN(
#                 start_time,
#                 end_point,
#                 dijkstra_costs,
#                 soma.ContourPointsCloseTo,
#                 max_straight_sq_dist=max_straight_sq_dist_c,
#             )
#
#             # Keep the connexion only if inferior to the allowed max weighted distance
#             if length <= max_weighted_length_c:
#                 cn_.ValidateConnection(soma, extension, end_point, path, dijkstra_costs)
#                 print(": Made")
#             else:
#                 print("")
#
#     # for soma in somas:
#     #     soma.Extend(extensions, som_nfo["dist_to_closest"], dijkstra_costs)
#
#     fb_.PrintConnectedExtensions(extensions)
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomasWithExtensions(somas, som_nfo, "all")
#
#     # -- Extension-Extension
#     print("\n--- Extension <-> Extension")
#
#     should_look_for_connections = True
#
#     # Update the soma maps with next Ext-Ext connexions
#     while should_look_for_connections:
#         # Update the final somas + ext map by adding the new extensions and their connexion paths
#         som_nfo["soma_w_ext_lmp"] = soma_t.SomasLMap(somas)
#         # Update the influence map
#         som_nfo["dist_to_closest"], som_nfo["influence_map"] = soma_t.InfluenceMaps(som_nfo["soma_w_ext_lmp"])
#
#         # Find the candidate extensions for connexion to the primary extensions
#         candidate_conn_nfo = cn_.CandidateConnections(
#             somas,
#             som_nfo["influence_map"],
#             som_nfo["dist_to_closest"],
#             extensions,
#             max_straight_sq_dist_c,
#         )
#
#         should_look_for_connections = False
#
#         # For each candidate, verify that it is valid based on the shortest path and the maximum allowed straight distance
#         for ep_idx, soma, extension, end_point in candidate_conn_nfo:
#             # Only try to connect if the extension is not already connected
#             if extension.is_unconnected:
#                 print(f"    Soma.{soma.uid} <-?-> Ext.{extension.uid}({ep_idx})", end="")
#                 # Use of Dijkstra shortest weighted path
#                 path, length = cn_.ShortestPathFromToN(
#                     start_time,
#                     end_point,
#                     dijkstra_costs,
#                     soma.ExtensionPointsCloseTo,
#                     max_straight_sq_dist=max_straight_sq_dist_c,
#                 )
#
#                 # Keep the connexion only if inferior to the allowed max weighted distance
#                 if length <= max_weighted_length_c:
#                     # Verify that the last element of the connexion path is contained in the extension to be connected
#                     # If so, returns the extensions containing the path last site.
#                     tgt_extension = extension_t.ExtensionContainingSite(extensions, path[-1])
#                     # Validate the connection: update the glial component objects and store more info in their fields
#                     cn_.ValidateConnection(tgt_extension, extension, end_point, path, dijkstra_costs)
#                     # TODO delete too long extensions
#
#                     should_look_for_connections = True
#
#                     print(": Made")
#                 else:
#                     print("")
#
#     # Create an extension map containing all the ext + connexions, without the somas.
#     # Used for the graphs extraction
#     ext_nfo["lmp_soma"] = som_nfo['soma_w_ext_lmp'] - som_nfo['lmp']
#
#     fb_.PrintConnectedExtensions(extensions)
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         fb_.PlotSomasWithExtensions(somas, som_nfo, "with_ext_of_ext")
#
#     # -- Summary
#     print("\n")
#     for soma in somas:
#         print(soma)
#
#     # snapshot = tr_.take_snapshot()
#     # top_file_stats = snapshot.statistics('lineno')
#     # print("Memory Profiling: Top 10 FILES")
#     # for stat in top_file_stats[:10]:
#     #     print(stat)
#     # top_block_stats = snapshot.statistics('traceback')
#     # top_block_stats = top_block_stats[0]
#     # print(f"Memory Profiling: {top_block_stats.count} memory blocks: {top_block_stats.size / 1024:.1f} KiB")
#     # for line in top_block_stats.traceback.format():
#     #     print(line)
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#
#     if with_plot:
#         pl_.show()
#
#     po_.MaximumIntensityProjectionZ(som_nfo['soma_w_ext_lmp'])
#     # , output_image_file_name=f"D:\\MorganeNadal\\Results\\Images\\{name_file}.png")
#
#     # --- Extract all the extensions of all somas as a graph
#     print('\n--- Graph extraction')
#
#     # Create the graphs with SKLGraph module (available on Eric Debreuve Gitlab)
#     for soma in somas:
#         print(f" Soma {soma.uid}", end="")
#         # Create SKLGraph skeletonized map
#         ext_map = skl_map_t.FromShapeMap(ext_nfo['lmp_soma'] == soma.uid, store_widths=True, skeletonize=False,
#                                          do_post_thinning=True)
#         # do_post_thinning = True, in order to remove pixels that are not breaking connectivity
#
#         # Create the graph from the SKLGraph skeletonized map
#         soma.skl_graph = skl_graph_t.FromSkeleton(ext_map, size_voxel=size_voxel_in_micron)
#
#         # --- Find the root of the {ext+conn} graphs.
#         # Roots are the nodes of degree 1 that are to be linked to the soma
#         soma.graph_roots = ge_.FindGraphsRootWithNodes(soma)
#
#         # Add a node "soma" and link it to the root nodes
#         soma_node = f"S-{int(soma.centroid[0])}-{int(soma.centroid[1])}-{int(soma.centroid[2])}"
#         soma.skl_graph.add_node(soma_node, soma=True, soma_nfo=soma)
#
#         for node in soma.graph_roots.values():
#             soma.skl_graph.add_edge(node, soma_node, root=True)
#         #
#         # nx_.draw_networkx(soma.skl_graph)
#         # if with_plot:
#         #     pl_.show(block=True)
#         #
#         print(": Done")
#
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#
#     # --- Extract features
#     print('\n--- Features Extraction\n')
#
#     # Parameters
#     if hist_bins_borders_length is None:
#         number_of_bins_length = int(number_of_bins_length)
#         bins_length = np_.linspace(hist_min_length, hist_min_length + hist_step_length * number_of_bins_length,
#                                    num=number_of_bins_length)
#         bins_length[-1] = np_.inf
#     else:
#         bins_length = np_.array(hist_bins_borders_length)
#         bins_length[-1] = np_.inf
#
#     if hist_bins_borders_curvature is None:
#         number_of_bins_curvature = int(number_of_bins_curvature)
#         bins_curvature = np_.linspace(hist_min_curvature,
#                                       hist_min_curvature + hist_step_curvature * number_of_bins_curvature,
#                                       num=number_of_bins_curvature)
#         bins_curvature[-1] = np_.inf
#     else:
#         bins_curvature = np_.array(hist_bins_borders_curvature)
#         bins_curvature[-1] = np_.inf
#
#     # Pandas dataframe creation with all the measured features
#     features_df = ge_.ExtractFeaturesInDF(name_file, somas, size_voxel_in_micron, bins_length, bins_curvature,
#                                           ext_scales)
#
#     # Save the pandas df into .csv
#     features_df.to_csv(f"{name_dir}\\{name_file}.csv")
#
#     #
#     elapsed_time = tm_.gmtime(tm_.time() - start_time)
#     print(f"\nElapsed Time={tm_.strftime('%Hh %Mm %Ss', elapsed_time)}")
#     print(f"DONE: {tm_.strftime('%a, %b %d %Y @ %H:%M:%S')}")
#
#     return features_df
#
#
# if __name__ == '__main__':
#
#     # --- Extract cell graphs and features from microscope images using the NutriMorph function.
#     #
#     # Differentiate between a path to a tiff file and a path to a directory
#     if pathlib.Path(data_path).is_file():
#         # Perform NutriMorph algorithm on the file entered in parameters
#         features_df = NutriMorph(data_path)
#
#     elif pathlib.Path(data_path).is_dir():
#         # Keep the parent directory path
#         name_dir = os_.path.dirname(data_path)
#         # Initialize the future concatenated features
#         concatenated_features_df = pd_.DataFrame()
#         # Find all the tiff files in the directory and its subdirectories
#         for path in pathlib.Path(data_path).glob("**/*.tif"):
#             if path.is_file():
#                 # Perform the NutriMorph algorithm
#                 features_df = NutriMorph(path)
#                 # Concatenate all the dataframes (DataFrame.append was removed in pandas 2.0)
#                 concatenated_features_df = pd_.concat([concatenated_features_df, features_df])
#         # Save to .csv in the parent directory
#         concatenated_features_df.to_csv(f"{data_path}_features.csv")
#
#     # --- TODO Clustering with this df and module cell_clustering.py